[ 494.600934] env[61570]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61570) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.601287] env[61570]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61570) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.601328] env[61570]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61570) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.601672] env[61570]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 494.697501] env[61570]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61570) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 494.708510] env[61570]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=61570) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 494.853948] env[61570]: INFO nova.virt.driver [None req-78466813-070f-4dbc-ad8e-edd49593cba1 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 494.941498] env[61570]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.941683] env[61570]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.941810] env[61570]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61570) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 498.211254] env[61570]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-f0e05ca6-2f7d-43a7-8a51-363d73522f5e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.227396] env[61570]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61570) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 498.227605] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-c06b9015-dfb2-4ac4-8acd-d421b2c9b9da {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.260438] env[61570]: INFO oslo_vmware.api [-] Successfully established new session; session ID is ab2e3.
[ 498.260719] env[61570]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.319s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.261204] env[61570]: INFO nova.virt.vmwareapi.driver [None req-78466813-070f-4dbc-ad8e-edd49593cba1 None None] VMware vCenter version: 7.0.3
[ 498.264615] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a332541-4924-46be-bf64-3d282c1c7fd0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.283075] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc8fd31-e30d-4492-b2e5-9a7ce7559b62 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.289911] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5691a849-3d54-426c-b116-be1202faeaf8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.297264] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3997a6f8-122c-444b-a689-776b9affe28b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.310867] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6d32c6-f7a9-4263-9b31-f419bab9317c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.317043] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb389ea-c9b4-466b-9078-33b4d574c05d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.347932] env[61570]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-b4c8a2ab-424a-45e2-92fd-954f61b12727 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.353811] env[61570]: DEBUG nova.virt.vmwareapi.driver [None req-78466813-070f-4dbc-ad8e-edd49593cba1 None None] Extension org.openstack.compute already exists. {{(pid=61570) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 498.356504] env[61570]: INFO nova.compute.provider_config [None req-78466813-070f-4dbc-ad8e-edd49593cba1 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 498.374449] env[61570]: DEBUG nova.context [None req-78466813-070f-4dbc-ad8e-edd49593cba1 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),84dd5bbf-1f00-4b99-82bf-3862070e86eb(cell1) {{(pid=61570) load_cells /opt/stack/nova/nova/context.py:464}}
[ 498.376435] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.376725] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.377405] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.377843] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Acquiring lock "84dd5bbf-1f00-4b99-82bf-3862070e86eb" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.378052] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Lock "84dd5bbf-1f00-4b99-82bf-3862070e86eb" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.379145] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Lock "84dd5bbf-1f00-4b99-82bf-3862070e86eb" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.400057] env[61570]: INFO dbcounter [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Registered counter for database nova_cell0
[ 498.407984] env[61570]: INFO dbcounter [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Registered counter for database nova_cell1
[ 498.410916] env[61570]: DEBUG oslo_db.sqlalchemy.engines [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61570) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.414863] env[61570]: DEBUG dbcounter [-] [61570] Writer thread running {{(pid=61570) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.415185] env[61570]: DEBUG oslo_db.sqlalchemy.engines [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61570) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.417856] env[61570]: ERROR nova.db.main.api [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.417856] env[61570]:     result = function(*args, **kwargs)
[ 498.417856] env[61570]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.417856] env[61570]:     return func(*args, **kwargs)
[ 498.417856] env[61570]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.417856] env[61570]:     result = fn(*args, **kwargs)
[ 498.417856] env[61570]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.417856] env[61570]:     return f(*args, **kwargs)
[ 498.417856] env[61570]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.417856] env[61570]:     return db.service_get_minimum_version(context, binaries)
[ 498.417856] env[61570]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.417856] env[61570]:     _check_db_access()
[ 498.417856] env[61570]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.417856] env[61570]:     stacktrace = ''.join(traceback.format_stack())
[ 498.417856] env[61570]: 
[ 498.419383] env[61570]: DEBUG dbcounter [-] [61570] Writer thread running {{(pid=61570) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.420399] env[61570]: ERROR nova.db.main.api [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.420399] env[61570]:     result = function(*args, **kwargs)
[ 498.420399] env[61570]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.420399] env[61570]:     return func(*args, **kwargs)
[ 498.420399] env[61570]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.420399] env[61570]:     result = fn(*args, **kwargs)
[ 498.420399] env[61570]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.420399] env[61570]:     return f(*args, **kwargs)
[ 498.420399] env[61570]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.420399] env[61570]:     return db.service_get_minimum_version(context, binaries)
[ 498.420399] env[61570]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.420399] env[61570]:     _check_db_access()
[ 498.420399] env[61570]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.420399] env[61570]:     stacktrace = ''.join(traceback.format_stack())
[ 498.420399] env[61570]: 
[ 498.420788] env[61570]: WARNING nova.objects.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 498.420906] env[61570]: WARNING nova.objects.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Failed to get minimum service version for cell 84dd5bbf-1f00-4b99-82bf-3862070e86eb
[ 498.421331] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Acquiring lock "singleton_lock" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 498.421493] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Acquired lock "singleton_lock" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 498.421743] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Releasing lock "singleton_lock" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 498.422092] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Full set of CONF: {{(pid=61570) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 498.422244] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ******************************************************************************** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 498.422374] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] Configuration options gathered from: {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 498.422510] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 498.422723] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 498.422866] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ================================================================================ {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
  allow_resize_to_same_host = True
  arq_binding_timeout = 300
  backdoor_port = None
  backdoor_socket = None
  block_device_allocate_retries = 60
  block_device_allocate_retries_interval = 3
  cert = self.pem
  compute_driver = vmwareapi.VMwareVCDriver
  compute_monitors = []
  config_dir = []
  config_drive_format = iso9660
  config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf']
  config_source = []
  console_host = devstack
  control_exchange = nova
  cpu_allocation_ratio = None
  daemon = False
  debug = True
  default_access_ip_network_name = None
  default_availability_zone = nova
  default_ephemeral_format = None
  default_green_pool_size = 1000
  default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO']
  default_schedule_zone = None
  disk_allocation_ratio = None
  enable_new_services = True
  enabled_apis = ['osapi_compute']
  enabled_ssl_apis = []
  flat_injected = False
  force_config_drive = False
  force_raw_images = True
  graceful_shutdown_timeout = 5
  heal_instance_info_cache_interval = 60
  host = cpu-1
  initial_cpu_allocation_ratio = 4.0
  initial_disk_allocation_ratio = 1.0
  initial_ram_allocation_ratio = 1.0
  injected_network_template = /opt/stack/nova/nova/virt/interfaces.template
  instance_build_timeout = 0
  instance_delete_interval = 300
  instance_format = [instance: %(uuid)s]
  instance_name_template = instance-%08x
  instance_usage_audit = False
  instance_usage_audit_period = month
  instance_uuid_format = [instance: %(uuid)s]
  instances_path = /opt/stack/data/nova/instances
  internal_service_availability_zone = internal
  key = None
  live_migration_retry_count = 30
  log_config_append = None
  log_date_format = %Y-%m-%d %H:%M:%S
  log_dir = None
  log_file = None
  log_options = True
  log_rotate_interval = 1
  log_rotate_interval_type = days
  log_rotation_type = none
  logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s
  logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}}
  logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s
  logging_exception_prefix = ERROR %(name)s %(instance)s
  logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s
  long_rpc_timeout = 1800
  max_concurrent_builds = 10
  max_concurrent_live_migrations = 1
  max_concurrent_snapshots = 5
  max_local_block_devices = 3
  max_logfile_count = 30
  max_logfile_size_mb = 200
  maximum_instance_delete_attempts = 5
  metadata_listen = 0.0.0.0
  metadata_listen_port = 8775
  metadata_workers = 2
  migrate_max_retries = -1
  mkisofs_cmd = genisoimage
  my_block_storage_ip = 10.180.1.21
  my_ip = 10.180.1.21
  network_allocate_retries = 0
  non_inheritable_image_properties = ['cache_in_nova', 'bittorrent']
  osapi_compute_listen = 0.0.0.0
  osapi_compute_listen_port = 8774
  osapi_compute_unique_server_name_scope =
  osapi_compute_workers = 2
  password_length = 12
  periodic_enable = True
  periodic_fuzzy_delay = 60
  pointer_model = usbtablet
  preallocate_images = none
  publish_errors = False
  pybasedir = /opt/stack/nova
  ram_allocation_ratio = None
  rate_limit_burst = 0
  rate_limit_except_level = CRITICAL
  rate_limit_interval = 0
  reboot_timeout = 0
  reclaim_instance_interval = 0
  record = None
  reimage_timeout_per_gb = 60
  report_interval = 120
  rescue_timeout = 0
  reserved_host_cpus = 0
  reserved_host_disk_mb = 0
  reserved_host_memory_mb = 512
  reserved_huge_pages = None
  resize_confirm_window = 0
  resize_fs_using_block_device = False
  resume_guests_state_on_host_boot = False
  rootwrap_config = /etc/nova/rootwrap.conf
  rpc_response_timeout = 60
  run_external_periodic_tasks = True
  running_deleted_instance_action = reap
  running_deleted_instance_poll_interval = 1800
  running_deleted_instance_timeout = 0
  scheduler_instance_sync_interval = 120
  service_down_time = 720
  servicegroup_driver = db
  shelved_offload_time = 0
  shelved_poll_interval = 3600
  shutdown_timeout = 0
  source_is_ipv6 = False
  ssl_only = False
  state_path = /opt/stack/data/n-cpu-1
  sync_power_state_interval = 600
  sync_power_state_pool_size = 1000
  syslog_log_facility = LOG_USER
  tempdir = None
  timeout_nbd = 10
  transport_url = ****
  update_resources_interval = 0
  use_cow_images = True
  use_eventlog = False
  use_journal = False
  use_json = False
  use_rootwrap_daemon = False
  use_stderr = False
  use_syslog = False
  vcpu_pin_set = None
  vif_plugging_is_fatal = True
  vif_plugging_timeout = 300
  virt_mkfs = []
  volume_usage_poll_interval = 0
  watch_log_file = False
  web = /usr/share/spice-html5
  oslo_concurrency.disable_process_locking = False
  oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1
  oslo_messaging_metrics.metrics_buffer_size = 1000
  oslo_messaging_metrics.metrics_enabled = False
  oslo_messaging_metrics.metrics_process_name =
  oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock
  oslo_messaging_metrics.metrics_thread_stop_timeout = 10
  api.auth_strategy = keystone
  api.compute_link_prefix = None
  api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01
  api.dhcp_domain = novalocal
  api.enable_instance_password = True
  api.glance_link_prefix = None
  api.instance_list_cells_batch_fixed_size = 100
  api.instance_list_cells_batch_strategy = distributed
  api.instance_list_per_project_cells = False
  api.list_records_by_skipping_down_cells = True
  api.local_metadata_per_cell = False
  api.max_limit = 1000
  api.metadata_cache_expiration = 15
  api.neutron_default_tenant_id = default
  api.use_neutron_default_nets = False
  api.vendordata_dynamic_connect_timeout = 5
  api.vendordata_dynamic_failure_fatal = False
  api.vendordata_dynamic_read_timeout = 5
  api.vendordata_dynamic_ssl_certfile =
  api.vendordata_dynamic_targets = []
  api.vendordata_jsonfile_path = None
  api.vendordata_providers = ['StaticJSON']
  cache.backend = dogpile.cache.memcached
  cache.backend_argument = ****
  cache.config_prefix = cache.oslo
  cache.dead_timeout = 60.0
  cache.debug_cache_backend = False
  cache.enable_retry_client = False
  cache.enable_socket_keepalive = False
  cache.enabled = True
  cache.enforce_fips_mode = False
  cache.expiration_time = 600
  cache.hashclient_retry_attempts = 2
  cache.hashclient_retry_delay = 1.0
  cache.memcache_dead_retry = 300
  cache.memcache_password = ****
  cache.memcache_pool_connection_get_timeout = 10
  cache.memcache_pool_flush_on_reconnect = False
  cache.memcache_pool_maxsize = 10
  cache.memcache_pool_unused_timeout = 60
  cache.memcache_sasl_enabled = False
  cache.memcache_servers = ['localhost:11211']
  cache.memcache_socket_timeout = 1.0
  cache.memcache_username = None
  cache.proxies = []
  cache.redis_password = ****
  cache.redis_sentinel_service_name = mymaster
  cache.redis_sentinels = ['localhost:26379']
  cache.redis_server = localhost:6379
  cache.redis_socket_timeout = 1.0
[ 498.457570] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.redis_username = None {{(pid=61570) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.457735] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.retry_attempts = 2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.457904] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.retry_delay = 0.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.458082] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.socket_keepalive_count = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.458249] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.socket_keepalive_idle = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.458411] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.socket_keepalive_interval = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.458571] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.tls_allowed_ciphers = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.458759] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.tls_cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.458925] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.tls_certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.459105] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.tls_enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.459270] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cache.tls_keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.459445] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.459635] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.auth_type = password {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.459832] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.460032] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.460202] env[61570]: DEBUG oslo_service.service 
[None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.460371] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.460534] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.cross_az_attach = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.460702] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.debug = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.460870] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.endpoint_template = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.461040] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.http_retries = 3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.461211] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.461397] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.461544] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.os_region_name = RegionOne {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.461711] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.461872] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cinder.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.462053] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.462218] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.cpu_dedicated_set = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.462378] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.cpu_shared_set = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.462545] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.image_type_exclude_list = [] {{(pid=61570) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.462711] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.462875] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464210] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464210] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464210] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464210] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.resource_provider_association_refresh = 300 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464210] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464210] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.shutdown_retry_interval = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464409] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464409] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] conductor.workers = 2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464409] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] console.allowed_origins = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464561] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] console.ssl_ciphers = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464684] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] console.ssl_minimum_version = default {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.464860] env[61570]: DEBUG oslo_service.service [None 
req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] consoleauth.enforce_session_timeout = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.465041] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] consoleauth.token_ttl = 600 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.465215] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.465375] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.465541] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.465701] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.connect_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.465860] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.connect_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.466027] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.endpoint_override = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.466196] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.466355] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.466518] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.max_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.466680] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.min_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.466838] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.region_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.466997] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.retriable_status_codes = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.467174] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.service_name = None {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.467344] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.service_type = accelerator {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.467509] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.467671] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.status_code_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.467831] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.status_code_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.468040] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.468239] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.468405] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] cyborg.version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.468591] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.backend = sqlalchemy {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.468799] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.connection = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.468971] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.connection_debug = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.469156] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.connection_parameters = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.469323] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.connection_recycle_time = 3600 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.469487] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.connection_trace = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.469654] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.db_inc_retry_interval = True {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.469846] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.db_max_retries = 20 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.470050] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.db_max_retry_interval = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.470155] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.db_retry_interval = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.470318] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.max_overflow = 50 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.470481] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.max_pool_size = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.470643] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.max_retries = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.470816] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.471024] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.mysql_wsrep_sync_wait = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.471218] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.pool_timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.471389] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.retry_interval = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.471557] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.slave_connection = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.471719] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.sqlite_synchronous = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.471884] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] database.use_db_reconnect = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.472084] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.backend = sqlalchemy {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.472262] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.connection = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.472429] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.connection_debug = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.472607] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.connection_parameters = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.472768] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.connection_recycle_time = 3600 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.472933] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.connection_trace = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.473109] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.db_inc_retry_interval = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.473815] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.db_max_retries = 20 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.473815] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.db_max_retry_interval = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.473815] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.db_retry_interval = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.473815] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.max_overflow = 50 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.474011] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.max_pool_size = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.474052] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.max_retries = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.474227] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.474676] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.474676] 
env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.pool_timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.474783] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.retry_interval = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.474841] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.slave_connection = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.475024] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] api_database.sqlite_synchronous = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.475202] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] devices.enabled_mdev_types = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.475381] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.475555] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.475722] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ephemeral_storage_encryption.enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.475886] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.476068] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.api_servers = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.476240] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.476403] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.476569] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.476732] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.connect_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.476896] env[61570]: DEBUG 
oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.connect_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.477071] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.debug = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.477241] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.default_trusted_certificate_ids = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.477405] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.enable_certificate_validation = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.477569] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.enable_rbd_download = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.477730] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.endpoint_override = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.477898] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.478069] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.478232] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.max_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.478389] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.min_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.478551] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.num_retries = 3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.478752] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.rbd_ceph_conf = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.478925] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.rbd_connect_timeout = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.479108] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.rbd_pool = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.479279] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.rbd_user = {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.479438] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.region_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.479600] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.retriable_status_codes = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.479822] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.service_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.480025] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.service_type = image {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.480199] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.480360] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.status_code_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.480520] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.status_code_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.480685] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.480864] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.481040] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.verify_glance_signatures = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.481203] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] glance.version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.481372] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] guestfs.debug = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.481547] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] mks.enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.481904] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.482107] 
env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] image_cache.manager_interval = 2400 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.482279] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] image_cache.precache_concurrency = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.482454] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] image_cache.remove_unused_base_images = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.482625] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.482794] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.482970] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] image_cache.subdirectory_name = _base {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.483159] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.api_max_retries = 60 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.483324] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.api_retry_interval = 2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.483485] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.483648] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.auth_type = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.483810] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.483970] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.484151] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.484318] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.conductor_group = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.484480] env[61570]: DEBUG 
oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.connect_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.484642] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.connect_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.484802] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.endpoint_override = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.484968] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.485143] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.485304] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.max_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.485462] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.min_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.485628] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.peer_list = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.485818] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.region_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.485988] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.retriable_status_codes = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.486166] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.serial_console_state_timeout = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.486330] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.service_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.486507] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.service_type = baremetal {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.486670] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.shard = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.486836] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.split_loggers = False {{(pid=61570) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.486995] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.status_code_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.487169] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.status_code_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.487331] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.487508] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.487669] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ironic.version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.487853] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.488037] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] key_manager.fixed_key = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.488224] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.488389] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.barbican_api_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.488546] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.barbican_endpoint = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.488758] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.barbican_endpoint_type = public {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.488934] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.barbican_region_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.489108] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.489270] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.certfile = None {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.489436] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.489604] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.489800] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.489974] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.number_of_retries = 60 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.490153] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.retry_delay = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.490319] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.send_service_user_token = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.490483] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.490642] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.490806] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.verify_ssl = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.490966] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican.verify_ssl_path = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.491147] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.491311] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.auth_type = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.491470] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.491628] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.491844] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.492040] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.492207] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.492373] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.492537] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] barbican_service_user.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.492739] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.approle_role_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.492917] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.approle_secret_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.493092] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.493257] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.493438] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.493606] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.493768] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.493938] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.kv_mountpoint = secret {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.494108] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.kv_path = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.494275] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None 
None] vault.kv_version = 2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.494435] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.namespace = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.494594] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.root_token_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.494784] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.494955] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.ssl_ca_crt_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.495127] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.495295] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.use_ssl = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.495468] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.495639] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.495804] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.auth_type = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.495963] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.496137] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.496323] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.496487] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.connect_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.496648] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.connect_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.496812] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.endpoint_override = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.496975] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.497146] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.497307] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.max_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.497465] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.min_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.497625] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.region_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.497821] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.retriable_status_codes = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.498012] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.service_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.498196] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.service_type = identity {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.498362] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.498524] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.status_code_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.498712] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.status_code_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.498882] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.499075] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.499239] env[61570]: DEBUG oslo_service.service [None 
req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] keystone.version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.499443] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.connection_uri = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.499606] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.cpu_mode = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.499826] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.500052] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.cpu_models = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.500244] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.cpu_power_governor_high = performance {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.500444] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.500582] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.cpu_power_management = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.500759] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.500955] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.device_detach_attempts = 8 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.501140] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.device_detach_timeout = 20 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.501308] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.disk_cachemodes = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.501466] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.disk_prefix = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.501629] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.enabled_perf_events = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.501792] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] 
libvirt.file_backed_memory = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.501957] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.gid_maps = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.502130] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.hw_disk_discard = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.502295] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.hw_machine_type = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.502468] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.images_rbd_ceph_conf = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.502635] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.502798] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.502968] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.images_rbd_glance_store_name = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.503152] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.images_rbd_pool = rbd {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.503328] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.images_type = default {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.503486] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.images_volume_group = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.503650] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.inject_key = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.503812] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.inject_partition = -2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.504006] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.inject_password = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.504183] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.iscsi_iface = None {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.504350] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.iser_use_multipath = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.504516] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.504681] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.504847] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_downtime = 500 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.505015] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.505183] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.505344] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_inbound_addr = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.505510] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.505674] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.505842] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_scheme = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.506026] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_timeout_action = abort {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.506201] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_tunnelled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.506365] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.live_migration_uri = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.506531] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.506689] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.max_queues = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.506895] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.507164] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.507334] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.nfs_mount_options = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.507638] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.507843] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.508009] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.508177] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.508343] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.508506] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.num_pcie_ports = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.508709] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.508884] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.pmem_namespaces = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.509056] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.quobyte_client_cfg = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.509349] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.509524] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.509728] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.509911] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.510087] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rbd_secret_uuid = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.510254] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rbd_user = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.510413] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.510582] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.510741] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rescue_image_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.510899] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rescue_kernel_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.511070] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rescue_ramdisk_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.511243] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.511402] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.rx_queue_size = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.511570] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.smbfs_mount_options = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.511851] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.512067] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.snapshot_compression = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.512241] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.snapshot_image_format = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.512464] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.512641] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.sparse_logical_volumes = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.512833] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.swtpm_enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.513014] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.swtpm_group = tss {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.513197] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.swtpm_user = tss {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.513369] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.sysinfo_serial = unique {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.513530] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.tb_cache_size = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.513691] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.tx_queue_size = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.513858] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.uid_maps = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.514031] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.use_virtio_for_bridges = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.514206] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.virt_type = kvm {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.514375] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.volume_clear = zero 
{{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.514540] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.volume_clear_size = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.514728] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.volume_use_multipath = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.514903] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.vzstorage_cache_path = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.515096] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.515288] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.515467] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.515654] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.515975] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.516172] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.vzstorage_mount_user = stack {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.516343] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.516516] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.516725] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.auth_type = password {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.516904] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.517092] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.certfile = None 
{{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.517253] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.517412] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.connect_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.517570] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.connect_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.517742] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.default_floating_pool = public {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.517905] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.endpoint_override = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.518084] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.extension_sync_interval = 600 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.518251] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.http_retries = 3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.518414] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.518573] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.518781] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.max_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.518968] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.519144] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.min_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.519319] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.ovs_bridge = br-int {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.519485] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.physnets = [] {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.519660] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.region_name = RegionOne {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.519901] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.retriable_status_codes = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.520112] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.service_metadata_proxy = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.520284] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.service_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.520458] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.service_type = network {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.520677] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.520782] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.status_code_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.520944] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.status_code_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.521117] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.521301] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.521465] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] neutron.version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.521638] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] notifications.bdms_in_notifications = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.521846] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] notifications.default_level = INFO {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.522045] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] notifications.notification_format = unversioned {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.522219] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] notifications.notify_on_state_change = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.522396] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.522571] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] pci.alias = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.522742] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] pci.device_spec = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.522910] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] pci.report_in_placement = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.523094] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.523270] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.auth_type = password {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.523438] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.523601] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.523763] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.523928] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.524099] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.connect_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.524262] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.connect_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.524421] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.default_domain_id = None {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.524578] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.default_domain_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.524738] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.domain_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.524922] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.domain_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.525098] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.endpoint_override = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.525267] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.525427] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.525585] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.max_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.525742] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.min_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.525911] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.password = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.526081] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.project_domain_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.526250] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.project_domain_name = Default {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.526415] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.project_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.526586] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.project_name = service {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.526782] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.region_name = RegionOne {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.526951] 
env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.retriable_status_codes = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.527126] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.service_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.527298] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.service_type = placement {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.527464] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.527627] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.status_code_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.527803] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.status_code_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.527982] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.system_scope = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.528158] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.528317] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.trust_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.528475] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.user_domain_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.528667] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.user_domain_name = Default {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.528841] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.user_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.529027] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.username = placement {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.529214] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.529374] env[61570]: DEBUG oslo_service.service [None 
req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] placement.version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.529551] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.cores = 20 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.529718] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.count_usage_from_placement = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.529892] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.530077] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.injected_file_content_bytes = 10240 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.530247] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.injected_file_path_length = 255 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.530413] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.injected_files = 5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.530581] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.instances = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.530791] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.key_pairs = 100 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.530932] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.metadata_items = 128 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.531122] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.ram = 51200 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.531293] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.recheck_quota = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.531461] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.server_group_members = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.531628] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] quota.server_groups = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.531801] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.531966] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.532143] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.image_metadata_prefilter = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.532308] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.532472] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.max_attempts = 3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.532634] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.max_placement_results = 1000 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.532800] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.532961] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.533135] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.533310] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] scheduler.workers = 2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.533488] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.533659] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.533851] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.534050] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.534223] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.534389] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.534553] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.534743] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.534912] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.host_subset_size = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.535089] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.535253] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.535418] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.535582] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.isolated_hosts = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.535748] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.isolated_images = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.535911] env[61570]: DEBUG oslo_service.service [None 
req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.536088] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.536262] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.536429] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.pci_in_placement = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.536595] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.536782] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.536974] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.537159] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.537325] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.537492] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.537659] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.track_instance_changes = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.537843] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.538024] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] metrics.required = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.538193] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] metrics.weight_multiplier = 1.0 
{{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.538356] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.538521] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] metrics.weight_setting = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.538860] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.539056] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] serial_console.enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.539244] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] serial_console.port_range = 10000:20000 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.539422] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.539603] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.539805] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] serial_console.serialproxy_port = 6083 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.539976] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.540168] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.auth_type = password {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.540333] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.540495] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.540665] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.540830] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.insecure = False {{(pid=61570) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.540994] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.541179] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.send_service_user_token = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.541349] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.541511] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] service_user.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.541699] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.agent_enabled = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.541868] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.542197] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.542396] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.542571] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.html5proxy_port = 6082 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.542760] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.image_compression = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.542930] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.jpeg_compression = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.543106] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.playback_compression = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.543282] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.server_listen = 127.0.0.1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.543454] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.543618] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.streaming_mode = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.543779] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] spice.zlib_compression = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.543946] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] upgrade_levels.baseapi = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.544132] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] upgrade_levels.compute = auto {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.544296] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] upgrade_levels.conductor = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.544455] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] upgrade_levels.scheduler = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.544620] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.544785] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.544943] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.545112] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.545277] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.545438] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.545596] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.545786] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.545951] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vendordata_dynamic_auth.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.546139] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.api_retry_count = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.546305] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.ca_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.546481] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.546690] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.cluster_name = testcl1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.546902] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.connection_pool_size = 10 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.547083] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.console_delay_seconds = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.547261] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.datastore_regex = ^datastore.* {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.547475] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.547651] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.host_password = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.547823] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.host_port = 443 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.547994] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.host_username = administrator@vsphere.local {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.548181] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.insecure = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.548346] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.integration_bridge = None {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.548513] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.maximum_objects = 100 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.548713] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.pbm_default_policy = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.548903] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.pbm_enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.549079] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.pbm_wsdl_location = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.549255] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.549417] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.serial_port_proxy_uri = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.549579] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.serial_port_service_uri = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.549746] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.task_poll_interval = 0.5 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.549922] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.use_linked_clone = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.550104] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.vnc_keymap = en-us {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.550274] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.vnc_port = 5900 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.550438] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vmware.vnc_port_total = 10000 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.550627] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.auth_schemes = ['none'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.550809] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.551120] env[61570]: 
DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.551309] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.551484] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.novncproxy_port = 6080 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.551663] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.server_listen = 127.0.0.1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.551867] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.552047] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.vencrypt_ca_certs = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.552216] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.vencrypt_client_cert = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.552378] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vnc.vencrypt_client_key = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.552563] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.552731] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.disable_deep_image_inspection = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.552898] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.553073] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.553243] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.553409] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.disable_rootwrap = False {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.553574] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.enable_numa_live_migration = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.553738] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.553904] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.554080] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.554249] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.libvirt_disable_apic = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.554413] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.554576] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.554747] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.554934] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.555113] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.555283] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.555449] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.555613] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.555779] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.555944] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.556142] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.556315] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.client_socket_timeout = 900 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.556485] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.default_pool_size = 1000 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.556677] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.keep_alive = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.556861] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.max_header_line = 16384 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.557042] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.557211] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.ssl_ca_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.557375] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.ssl_cert_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.557539] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.ssl_key_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.557706] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.tcp_keepidle = 600 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.557923] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.558103] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] zvm.ca_file = None {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.558267] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] zvm.cloud_connector_url = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.558565] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.558764] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] zvm.reachable_timeout = 300 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.558953] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.enforce_new_defaults = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.559142] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.enforce_scope = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.559321] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.policy_default_rule = default {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.559504] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.559681] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.policy_file = policy.yaml {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.559854] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.560026] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.560189] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.560348] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.560512] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.560682] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.560883] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.561089] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.connection_string = messaging:// {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.561265] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.enabled = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.561435] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.es_doc_type = notification {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.561600] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.es_scroll_size = 10000 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.561769] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.es_scroll_time = 2m {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.561935] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.filter_error_trace = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.562114] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.hmac_keys = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.562287] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.sentinel_service_name = mymaster {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.562451] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.socket_timeout = 0.1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.562613] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.trace_requests = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.562776] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler.trace_sqlalchemy = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.562949] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler_jaeger.process_tags = {} {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.563124] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.563288] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] profiler_otlp.service_name_prefix = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.563451] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] remote_debug.host = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.563609] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] remote_debug.port = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.563792] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.563982] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.564165] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.564329] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.564491] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.564941] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.564941] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.565012] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.565129] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.565303] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.565462] env[61570]: 
DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.565633] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.565798] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.565967] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.566151] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.566323] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.566485] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.566682] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.566876] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.567069] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.567242] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.567415] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.567583] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.567749] env[61570]: DEBUG oslo_service.service [None 
req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.567910] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.568085] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.568251] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.568414] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.568583] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.568779] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.ssl = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.568959] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.569144] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.569313] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.569487] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.569686] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.569868] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.570069] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.570241] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_notifications.retry = -1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.570427] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.570603] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.570777] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.auth_section = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.570936] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.auth_type = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.571110] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.cafile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.571311] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.certfile = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.571434] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.collect_timing = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.571594] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.connect_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.571751] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.connect_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.571912] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.endpoint_id = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.572082] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.endpoint_override = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.572247] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.insecure = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.572404] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.keyfile = None {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.572563] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.max_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.572740] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.min_version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.572911] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.region_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.573085] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.retriable_status_codes = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.573248] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.service_name = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.573406] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.service_type = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.573572] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.split_loggers = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.573730] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.status_code_retries = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.573890] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.status_code_retry_delay = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.574058] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.timeout = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.574219] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.valid_interfaces = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.574375] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_limit.version = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.574539] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_reports.file_event_handler = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.574705] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61570) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.574866] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] oslo_reports.log_dir = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.575047] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.575213] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.575372] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.575539] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.575722] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.575901] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.576085] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.576252] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_ovs_privileged.group = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.576412] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.576579] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.576771] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.576939] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] vif_plug_ovs_privileged.user = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.577124] env[61570]: DEBUG oslo_service.service 
[None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.577307] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.577481] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.577653] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.577827] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.577992] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.578174] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.578338] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.578515] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.578721] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_ovs.isolate_vif = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.578917] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.579102] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.579278] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.579452] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
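The long run of "log_opt_values" entries above and below is oslo.config dumping every registered option (group.name = value) at service startup. The following is a minimal sketch of how such lines are produced, assuming an illustrative option set rather than Nova's actual startup code; the group name and defaults simply mirror the os_vif_ovs values shown in the log.

# Minimal sketch (assumption: illustrative only, not Nova's real registration code).
# oslo.config registers options per group and can dump every current value,
# which is what emits the "log_opt_values" DEBUG lines seen in this log.
import logging

from oslo_config import cfg

CONF = cfg.CONF

# Defaults mirror the values printed above for the [os_vif_ovs] group.
ovs_opts = [
    cfg.IntOpt('network_device_mtu', default=1500),
    cfg.IntOpt('ovs_vsctl_timeout', default=120),
    cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
    cfg.StrOpt('ovsdb_interface', default='native'),
    cfg.BoolOpt('per_port_bridge', default=False),
]
CONF.register_opts(ovs_opts, group='os_vif_ovs')

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

if __name__ == '__main__':
    CONF([], project='nova')                  # parse (empty) CLI args / config files
    CONF.log_opt_values(LOG, logging.DEBUG)   # one DEBUG line per option, as above

Running the sketch prints each registered option at DEBUG level, ending with the row of asterisks that closes the dump a little further down in this log.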
[ 498.579625] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_vif_ovs.per_port_bridge = False {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.579794] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_brick.lock_path = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.579959] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.580136] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.580307] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] privsep_osbrick.capabilities = [21] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.580468] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] privsep_osbrick.group = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.580626] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] privsep_osbrick.helper_command = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.580794] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.580963] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.581144] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] privsep_osbrick.user = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.581306] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.581466] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] nova_sys_admin.group = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.581624] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] nova_sys_admin.helper_command = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.581812] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.581989] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.582166] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] nova_sys_admin.user = None {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.582300] env[61570]: DEBUG oslo_service.service [None req-c30424e5-efa5-431d-9a91-01d17a3c0032 None None] ******************************************************************************** {{(pid=61570) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 498.582731] env[61570]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 498.593324] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Getting list of instances from cluster (obj){ [ 498.593324] env[61570]: value = "domain-c8" [ 498.593324] env[61570]: _type = "ClusterComputeResource" [ 498.593324] env[61570]: } {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 498.594625] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc5f9d6-f8fc-4113-9efe-ec027c5d3bca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.604517] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Got total of 0 instances {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 498.605100] env[61570]: WARNING nova.virt.vmwareapi.driver [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 498.605569] env[61570]: INFO nova.virt.node [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Generated node identity 829dc000-b508-440d-ae59-f7cfbca90113 [ 498.605835] env[61570]: INFO nova.virt.node [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Wrote node identity 829dc000-b508-440d-ae59-f7cfbca90113 to /opt/stack/data/n-cpu-1/compute_id [ 498.619609] env[61570]: WARNING nova.compute.manager [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Compute nodes ['829dc000-b508-440d-ae59-f7cfbca90113'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 498.661637] env[61570]: INFO nova.compute.manager [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 498.687319] env[61570]: WARNING nova.compute.manager [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
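The entries that follow show the resource tracker repeatedly acquiring and releasing the "compute_resources" lock, with the waited/held timings logged by oslo.concurrency. Below is a minimal sketch of that locking pattern, assuming a hypothetical tracker class and method purely for illustration; only the decorator usage reflects what produces the "Acquiring lock ... / acquired / released" lines.

# Minimal sketch (assumption: ToyResourceTracker and its method are hypothetical;
# only the lockutils.synchronized pattern mirrors the log lines around this point).
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'


class ToyResourceTracker:
    """Illustrative stand-in for a tracker that serializes resource updates."""

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def update_available_resource(self, node_name):
        # Runs with the "compute_resources" lock held; oslo.concurrency logs the
        # acquire/release (and wait/held durations) at DEBUG, as seen in this log.
        return {'node': node_name, 'allocated_vcpus': 0}


if __name__ == '__main__':
    tracker = ToyResourceTracker()
    print(tracker.update_available_resource('domain-c8'))

Because the lock is in-process and named, concurrent callers of any method decorated with the same semaphore name are serialized, which is why the held/waited times appear in the entries below.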
[ 498.687578] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.687802] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.687959] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 498.688128] env[61570]: DEBUG nova.compute.resource_tracker [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 498.689387] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffafdb7d-dd43-438b-b158-3b73f1d4a6db {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.698188] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947dbba6-ae66-4cc3-85b0-8ff049a993e1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.712401] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5bdf5f-7df0-4381-ac89-cee464f3c310 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.718841] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d7b205-730e-4ff2-9e10-66afe941ee43 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.749483] env[61570]: DEBUG nova.compute.resource_tracker [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180595MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 498.749679] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.749850] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.763188] env[61570]: WARNING 
nova.compute.resource_tracker [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] No compute node record for cpu-1:829dc000-b508-440d-ae59-f7cfbca90113: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 829dc000-b508-440d-ae59-f7cfbca90113 could not be found. [ 498.778725] env[61570]: INFO nova.compute.resource_tracker [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 829dc000-b508-440d-ae59-f7cfbca90113 [ 498.837148] env[61570]: DEBUG nova.compute.resource_tracker [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 498.837320] env[61570]: DEBUG nova.compute.resource_tracker [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] stats={'failed_builds': '0'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 498.955219] env[61570]: INFO nova.scheduler.client.report [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] [req-9adadbb2-39af-4816-9cdb-8a03e046a79d] Created resource provider record via placement API for resource provider with UUID 829dc000-b508-440d-ae59-f7cfbca90113 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 498.973239] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa77563-4aa6-4d7a-960b-c2d0f13d34d3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.981063] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540954ff-25c2-448d-bfd8-07f0e15deb67 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.010265] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25569790-7c4f-4770-9472-bd37c30e37f6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.018183] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d89d62-766b-4e6b-a4f2-bfe32d62bffc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 499.032804] env[61570]: DEBUG nova.compute.provider_tree [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 499.077266] env[61570]: DEBUG nova.scheduler.client.report [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Updated inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113 with generation 0 in Placement from set_inventory_for_provider using 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 499.077519] env[61570]: DEBUG nova.compute.provider_tree [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Updating resource provider 829dc000-b508-440d-ae59-f7cfbca90113 generation from 0 to 1 during operation: update_inventory {{(pid=61570) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 499.077665] env[61570]: DEBUG nova.compute.provider_tree [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 499.127564] env[61570]: DEBUG nova.compute.provider_tree [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Updating resource provider 829dc000-b508-440d-ae59-f7cfbca90113 generation from 1 to 2 during operation: update_traits {{(pid=61570) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 499.145687] env[61570]: DEBUG nova.compute.resource_tracker [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 499.145884] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.396s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 499.146060] env[61570]: DEBUG nova.service [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Creating RPC server for service compute {{(pid=61570) start /opt/stack/nova/nova/service.py:182}} [ 499.159895] env[61570]: DEBUG nova.service [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] Join ServiceGroup membership for this service compute {{(pid=61570) start /opt/stack/nova/nova/service.py:199}} [ 499.160097] env[61570]: DEBUG nova.servicegroup.drivers.db [None req-e4c58bef-89df-42ee-8d38-d51d352d9476 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61570) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 508.416012] env[61570]: DEBUG dbcounter [-] [61570] Writing DB stats nova_cell0:SELECT=1 {{(pid=61570) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 508.420711] env[61570]: DEBUG dbcounter [-] [61570] Writing DB stats nova_cell1:SELECT=1 {{(pid=61570) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 521.163995] env[61570]: DEBUG oslo_service.periodic_task [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_power_states {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 521.177456] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Getting list of instances from cluster (obj){ [ 521.177456] env[61570]: value = "domain-c8" [ 521.177456] env[61570]: _type = "ClusterComputeResource" [ 521.177456] env[61570]: } {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 521.178565] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80593748-d96c-4e9d-a830-1b28aabfe180 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.187379] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Got total of 0 instances {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 521.187615] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 521.187940] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Getting list of instances from cluster (obj){ [ 521.187940] env[61570]: value = "domain-c8" [ 521.187940] env[61570]: _type = "ClusterComputeResource" [ 521.187940] env[61570]: } {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 521.188846] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357ce5a9-7e02-4fde-b5f8-cb516f9b06c3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.196767] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Got total of 0 instances {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 540.083867] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquiring lock "24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.084275] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Lock "24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.131543] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 540.287198] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.288011] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.289659] env[61570]: INFO nova.compute.claims [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 540.479989] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fca0536-044c-463d-a5ac-4871f9495366 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.493452] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a129d7-6e76-4851-98a3-d851230db46b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.535021] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d45dcd8-8512-4a6f-8ab2-1676ce22560f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.544267] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705f8f18-730c-4a9d-86ca-da5130b74cfe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.568919] env[61570]: DEBUG nova.compute.provider_tree [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.581646] env[61570]: DEBUG nova.scheduler.client.report [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 540.607172] 
env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.607172] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 540.660901] env[61570]: DEBUG nova.compute.utils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.660901] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 540.660901] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 540.684664] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 540.791449] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 542.798131] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 542.798131] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 542.798131] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 542.798540] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 542.798540] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 542.798540] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 542.799188] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 542.799667] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 542.800225] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 542.801210] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 542.801585] env[61570]: DEBUG nova.virt.hardware [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 542.803570] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e1e206-c7e9-4c10-9d17-8c499a2a5c86 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.817409] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78426c39-5ac3-4e4e-a579-44feaada7499 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.840219] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95bb830-398e-4757-aa35-5f6b2a2d59f5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.934869] env[61570]: DEBUG nova.policy [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cb020350c8b4d46bf4a7e6ee63263a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2550f62320f14e59961625dff376b2ea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 543.645367] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Successfully created port: 0dcf2e37-5e8f-4650-9f10-0144263fd756 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.987850] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquiring lock "8926a7b1-989f-4290-8828-1e75efbc0553" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.988191] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Lock "8926a7b1-989f-4290-8828-1e75efbc0553" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.048741] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 546.181263] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.181358] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.184450] env[61570]: INFO nova.compute.claims [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.407705] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262da275-a8a8-4988-b569-358235be81be {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.416223] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc21eacf-9b82-4663-a513-958e96a60cf8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.453754] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b79796-b54b-4594-8a82-2f4f39c0b406 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.467815] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfe9b8c-0edf-4360-a097-99b40cddf99e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.483952] env[61570]: DEBUG nova.compute.provider_tree [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 
{{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.496482] env[61570]: DEBUG nova.scheduler.client.report [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.520120] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.520120] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 546.602737] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquiring lock "a95948a6-bad7-4cf0-902f-7c7bb4157d2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.603187] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Lock "a95948a6-bad7-4cf0-902f-7c7bb4157d2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.624064] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 546.630617] env[61570]: DEBUG nova.compute.utils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.632360] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 546.636106] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 546.655018] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 546.712287] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.712287] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.713870] env[61570]: INFO nova.compute.claims [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.789461] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 546.826588] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.826854] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.827138] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.827200] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.827440] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.827612] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.827828] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.827986] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 546.828175] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.828652] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.828652] env[61570]: DEBUG nova.virt.hardware [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.829434] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1638afeb-89f1-4a7b-9d2c-45d121dc519e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.847084] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d742ec-8a91-4971-92bc-6d74e9707175 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.937211] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "766812ee-e272-4779-b85d-7fdde876e877" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.937468] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "766812ee-e272-4779-b85d-7fdde876e877" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.939641] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3511577-a416-466c-a10d-26cf685f2e15 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.951185] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7947786-1dd7-4b3e-a070-9fd0a27d3c55 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.985829] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ae6126-5787-4c25-94e0-bc61150c2432 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.989237] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 
tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 546.998568] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69920a4c-1864-44ba-a732-c5902acc3ffe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.013932] env[61570]: DEBUG nova.compute.provider_tree [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.036382] env[61570]: DEBUG nova.scheduler.client.report [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.056517] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.345s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.057044] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 547.088450] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.088789] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.092316] env[61570]: INFO nova.compute.claims [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 547.139151] env[61570]: DEBUG nova.policy [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '442bdb65d15d4c9e94eacc85e0b1300c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b65c16386e5e420d9e1ca6fc7aa13b49', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 547.165849] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Successfully updated port: 0dcf2e37-5e8f-4650-9f10-0144263fd756 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 547.219653] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquiring lock "refresh_cache-24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.219858] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquired lock "refresh_cache-24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.220243] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Building network info cache for instance {{(pid=61570) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 547.268349] env[61570]: DEBUG nova.compute.utils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.273886] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Not allocating networking since 'none' was specified. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 547.305114] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 547.448627] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 547.452818] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.463981] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35a5afe-5265-4fe1-90c1-b678bd5ee86e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.474534] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dfe6f6-748f-4a19-a08c-e330846311fa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.517427] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35746b6e-ab4f-4a02-8579-8a906bd327e9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.522108] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.522572] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.522572] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.522776] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.522832] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.522974] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.523244] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.523460] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 547.523540] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.523692] env[61570]: DEBUG nova.virt.hardware [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.523883] env[61570]: DEBUG nova.virt.hardware [None 
req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.525128] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce683fa-25f9-4e54-99e6-29e7251689dc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.537602] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eef42b2-616f-4bf4-9580-1d79c05a26e4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.553568] env[61570]: DEBUG nova.compute.provider_tree [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.558224] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff7220e-f58e-4dfc-a808-d8dde066b229 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.572622] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Instance VIF info [] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 547.583268] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 547.584230] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61c5f6a2-f2da-4948-a9ab-e13c9cf539f9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.586552] env[61570]: DEBUG nova.scheduler.client.report [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.600799] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Created folder: OpenStack in parent group-v4. 
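[editor's note] The lockutils lines above ("Acquiring lock \"compute_resources\" ... acquired ... released ... held 0.345s") are produced by oslo.concurrency's lock wrapper around the resource tracker's instance claim. A minimal, hedged sketch of that locking pattern follows; the guarded function and its arguments are purely illustrative and are not Nova's ResourceTracker code.

    from oslo_concurrency import lockutils

    # The lock name matches the one printed in the log; the body is a
    # placeholder, not the real instance_claim() logic.
    @lockutils.synchronized('compute_resources')
    def claim_for_instance(instance_uuid, memory_mb, vcpus):
        # While this body runs, any other caller synchronized on
        # "compute_resources" blocks.  The decorator's inner() wrapper is
        # what emits the "acquired ... waited Ns" / "released ... held Ns"
        # DEBUG lines seen above.
        print('claiming %s MB / %s vCPU for %s'
              % (memory_mb, vcpus, instance_uuid))

    claim_for_instance('766812ee-e272-4779-b85d-7fdde876e877', 128, 1)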
[ 547.600799] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Creating folder: Project (23f18f56552247eea7220e1c2d866886). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 547.601071] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-896f9ae6-194f-4a44-9ca2-92bb5eb5c8c4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.604927] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.516s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.605311] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 547.612099] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Created folder: Project (23f18f56552247eea7220e1c2d866886) in parent group-v953072. [ 547.612099] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Creating folder: Instances. Parent ref: group-v953073. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 547.614327] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18598d9a-953d-42c3-bcf7-84edf53a58c2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.626123] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Created folder: Instances in parent group-v953073. [ 547.626341] env[61570]: DEBUG oslo.service.loopingcall [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.626505] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 547.626914] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c88c143c-075a-4a19-a383-72ece3d124c8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.644665] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 547.644665] env[61570]: value = "task-4891241" [ 547.644665] env[61570]: _type = "Task" [ 547.644665] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.656135] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891241, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.658767] env[61570]: DEBUG nova.compute.utils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.660847] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 547.660933] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 547.685289] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 547.782040] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 547.816209] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.816501] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.816686] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.817027] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.817027] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.817228] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.817459] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.817647] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
547.817867] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.818017] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.818225] env[61570]: DEBUG nova.virt.hardware [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.819529] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45305c5-2972-4c74-ac3b-457027f355fe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.829012] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c9256b-2569-4547-812e-2fe69c6b797f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.009262] env[61570]: DEBUG nova.policy [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd87c59eede9c4bf186380f9f37a2caaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21713d88a2d5483f89ae59404d3aa235', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 548.157490] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891241, 'name': CreateVM_Task, 'duration_secs': 0.355122} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.157939] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 548.159098] env[61570]: DEBUG oslo_vmware.service [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d3d33b-da3f-4c66-8911-4108efc8e2a4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.167053] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.167520] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.169231] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 548.169882] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63ecc9f9-6a5f-4d28-bcd4-a6880b415b5a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.177818] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Waiting for the task: (returnval){ [ 548.177818] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ae7bab-46b4-2dca-d96c-fad51050d2dd" [ 548.177818] env[61570]: _type = "Task" [ 548.177818] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.193313] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.193645] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 548.193877] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.194034] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.194442] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 548.194715] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11bc5239-cdeb-4bb1-ae39-e11ad6a23920 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.213382] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 548.213382] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 548.214109] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403e0143-3751-4b30-81c4-0941c772099c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.226770] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bda5158-cfaa-431b-8efd-cfe7bc8c2435 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.234033] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Waiting for the task: (returnval){ [ 548.234033] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]523bbd35-4ff4-52cd-30e4-559ac2e4a589" [ 548.234033] env[61570]: _type = "Task" [ 548.234033] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.244108] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]523bbd35-4ff4-52cd-30e4-559ac2e4a589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.424387] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Updating instance_info_cache with network_info: [{"id": "0dcf2e37-5e8f-4650-9f10-0144263fd756", "address": "fa:16:3e:d4:e2:ce", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dcf2e37-5e", "ovs_interfaceid": "0dcf2e37-5e8f-4650-9f10-0144263fd756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.448697] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Releasing lock "refresh_cache-24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.449045] 
env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Instance network_info: |[{"id": "0dcf2e37-5e8f-4650-9f10-0144263fd756", "address": "fa:16:3e:d4:e2:ce", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dcf2e37-5e", "ovs_interfaceid": "0dcf2e37-5e8f-4650-9f10-0144263fd756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 548.450127] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:e2:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0dcf2e37-5e8f-4650-9f10-0144263fd756', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 548.460257] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Creating folder: Project (2550f62320f14e59961625dff376b2ea). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 548.460897] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cb3ab8d-a214-4dbd-bbb5-f979dcb56c17 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.475135] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Created folder: Project (2550f62320f14e59961625dff376b2ea) in parent group-v953072. [ 548.475565] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Creating folder: Instances. Parent ref: group-v953076. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 548.475660] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59781c01-4ddd-422b-9475-dae7d803c3ec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.493984] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Created folder: Instances in parent group-v953076. [ 548.493984] env[61570]: DEBUG oslo.service.loopingcall [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.494137] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 548.494302] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d811a62-ce9a-43c7-ad2f-190c6ab4a6d8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.523189] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 548.523189] env[61570]: value = "task-4891244" [ 548.523189] env[61570]: _type = "Task" [ 548.523189] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.536833] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891244, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.753497] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 548.753809] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Creating directory with path [datastore2] vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 548.754140] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a554f5a-ec08-40b2-b77d-39b11ea985cf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.781763] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Created directory with path [datastore2] vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 548.783677] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Fetch image to [datastore2] vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 548.783677] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 548.783677] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4596d9-16ce-4313-b97c-9350b00eacd5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.797431] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc447d7f-cd5f-4d88-b042-82f3ac86ff04 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.821614] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68edca0b-523e-4c6f-b2d0-4f616ba40238 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.869156] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c54506-61d3-4b6a-bf4a-2242d6c5e79b {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.876668] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75d92127-89d7-47e7-8339-1b4920e5e579 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.924205] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "20b016b6-a53d-4b1f-a7c7-539d1636091b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.924421] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "20b016b6-a53d-4b1f-a7c7-539d1636091b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.943020] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 548.976364] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 549.033754] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.034273] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.041202] env[61570]: INFO nova.compute.claims [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.058510] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891244, 'name': CreateVM_Task, 'duration_secs': 0.412277} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.058510] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 549.080922] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.081162] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.083685] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 549.083685] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86a41b34-03ab-4a6e-82cc-873f04967f60 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.090671] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Waiting for the task: (returnval){ [ 549.090671] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]527eefb0-63dd-d691-7ec8-9390d0e81386" [ 549.090671] env[61570]: _type = "Task" [ 549.090671] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.106583] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]527eefb0-63dd-d691-7ec8-9390d0e81386, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.128463] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 549.208646] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 549.208646] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 549.343020] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Successfully created port: f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.354109] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0536bae-861e-45ab-bce3-29c22bcab917 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.368917] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7106a9bf-4d27-4833-ba11-189057d66ca0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.404889] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ee0d0a-06be-4d73-80db-a9f6e7addbf5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.413536] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35430bd5-d4cb-4110-860f-2734df0dcf6d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.430817] env[61570]: DEBUG nova.compute.provider_tree [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.451677] env[61570]: DEBUG nova.scheduler.client.report [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.473861] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.440s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.474665] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 549.530711] env[61570]: DEBUG nova.compute.utils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.533579] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 549.533763] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 549.548195] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Start building block device mappings for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 549.613218] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.613525] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 549.613668] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.639736] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 549.676419] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.677118] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.677460] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.681020] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 
tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.681020] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.681020] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.681020] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.681020] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.681246] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.681246] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.681246] env[61570]: DEBUG nova.virt.hardware [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.681246] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dbe402-c253-4a40-bdc7-379b69356621 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.691731] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117cdd1b-e2ab-4877-9958-aedd84ef63e9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.847986] env[61570]: DEBUG nova.policy [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab2f35e4a6b744db8470656aed0cc984', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': '34fecdc3cc7f47fdba241831e5f27f53', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 550.823124] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Successfully created port: dda317d0-cb04-4e2a-8e4a-ac825d4807ec {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 551.310282] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Successfully created port: 03b18258-5fde-464a-8a20-d20940879e4a {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 551.610460] env[61570]: DEBUG nova.compute.manager [req-e58b8a8d-e662-40bd-816e-d807023997be req-b55866a8-1bba-45be-8a66-223b32ac005c service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Received event network-vif-plugged-0dcf2e37-5e8f-4650-9f10-0144263fd756 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 551.610460] env[61570]: DEBUG oslo_concurrency.lockutils [req-e58b8a8d-e662-40bd-816e-d807023997be req-b55866a8-1bba-45be-8a66-223b32ac005c service nova] Acquiring lock "24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.610460] env[61570]: DEBUG oslo_concurrency.lockutils [req-e58b8a8d-e662-40bd-816e-d807023997be req-b55866a8-1bba-45be-8a66-223b32ac005c service nova] Lock "24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.612290] env[61570]: DEBUG oslo_concurrency.lockutils [req-e58b8a8d-e662-40bd-816e-d807023997be req-b55866a8-1bba-45be-8a66-223b32ac005c service nova] Lock "24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.613540] env[61570]: DEBUG nova.compute.manager [req-e58b8a8d-e662-40bd-816e-d807023997be req-b55866a8-1bba-45be-8a66-223b32ac005c service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] No waiting events found dispatching network-vif-plugged-0dcf2e37-5e8f-4650-9f10-0144263fd756 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 551.613857] env[61570]: WARNING nova.compute.manager [req-e58b8a8d-e662-40bd-816e-d807023997be req-b55866a8-1bba-45be-8a66-223b32ac005c service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Received unexpected event network-vif-plugged-0dcf2e37-5e8f-4650-9f10-0144263fd756 for instance with vm_state building and task_state spawning. 
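The hardware.py DEBUG trace above (Flavor pref 0:0:0 ... Got 1 possible topologies ... Sorted desired topologies) records Nova enumerating candidate CPU topologies for a 1-vCPU flavor with no preference and effectively unlimited maxima. The standalone Python sketch below is not Nova's hardware.py code; the helper names possible_topologies/desirable_topologies and the scoring rule are assumptions, included only to illustrate the enumerate-filter-sort pattern those log lines trace.

# Illustrative sketch only -- NOT Nova's hardware.py implementation.
# It mimics the pattern the DEBUG lines above record: enumerate every
# (sockets, cores, threads) layout that multiplies out to the requested
# vCPU count, keep the ones within the maxima, then sort so layouts
# closest to the preferred topology come first.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, maximum):
    """Yield every topology with sockets*cores*threads == vcpus within the limits."""
    for sockets in range(1, min(vcpus, maximum.sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, maximum.cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= maximum.threads:
                yield VirtCPUTopology(sockets, cores, threads)

def desirable_topologies(vcpus, preferred, maximum):
    """Sort candidates so those matching the preferred layout come first."""
    def score(topo):
        # Higher score = more fields matching the preference (0 means "no preference").
        return sum(1 for want, got in zip(preferred, topo) if want and want == got)
    return sorted(possible_topologies(vcpus, maximum), key=score, reverse=True)

if __name__ == "__main__":
    preferred = VirtCPUTopology(0, 0, 0)            # flavor/image expressed no preference
    maximum = VirtCPUTopology(65536, 65536, 65536)  # effectively unconstrained maxima
    print(desirable_topologies(1, preferred, maximum))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the
    #    "Got 1 possible topologies" / "Sorted desired topologies" lines above.

For a 1-vCPU instance the only factorization is 1:1:1, which is why the trace shows exactly one possible and one desired topology.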
[ 552.087873] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Successfully updated port: dda317d0-cb04-4e2a-8e4a-ac825d4807ec {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.110115] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "refresh_cache-20b016b6-a53d-4b1f-a7c7-539d1636091b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.110722] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "refresh_cache-20b016b6-a53d-4b1f-a7c7-539d1636091b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.111279] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.180372] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.275999] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "48b87f3a-879c-4578-90cf-3e4328299e81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.277430] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "48b87f3a-879c-4578-90cf-3e4328299e81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.295864] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 552.389407] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.389688] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.392267] env[61570]: INFO nova.compute.claims [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.500890] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Updating instance_info_cache with network_info: [{"id": "dda317d0-cb04-4e2a-8e4a-ac825d4807ec", "address": "fa:16:3e:29:bb:c4", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda317d0-cb", "ovs_interfaceid": "dda317d0-cb04-4e2a-8e4a-ac825d4807ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.524622] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "refresh_cache-20b016b6-a53d-4b1f-a7c7-539d1636091b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.524622] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Instance network_info: |[{"id": 
"dda317d0-cb04-4e2a-8e4a-ac825d4807ec", "address": "fa:16:3e:29:bb:c4", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda317d0-cb", "ovs_interfaceid": "dda317d0-cb04-4e2a-8e4a-ac825d4807ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 552.525013] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:bb:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dda317d0-cb04-4e2a-8e4a-ac825d4807ec', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 552.532525] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating folder: Project (34fecdc3cc7f47fdba241831e5f27f53). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 552.535922] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbcb3be7-e832-47e7-b087-35d5b0913580 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.548009] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created folder: Project (34fecdc3cc7f47fdba241831e5f27f53) in parent group-v953072. [ 552.548009] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating folder: Instances. Parent ref: group-v953079. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 552.548009] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44e1c1c6-c8a4-4093-ac65-9947a2144531 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.557592] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created folder: Instances in parent group-v953079. [ 552.557817] env[61570]: DEBUG oslo.service.loopingcall [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 552.561378] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 552.561436] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75b10fba-c182-4a9c-98c4-c41d76ceee32 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.590742] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.590981] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.596546] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 552.596546] env[61570]: value = "task-4891247" [ 552.596546] env[61570]: _type = "Task" [ 552.596546] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.608822] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891247, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.609597] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 552.626917] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ad9161-afd1-4018-9bd3-dc9d96c88672 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.636704] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404df1b1-7c84-45e2-a226-5bf6a5649866 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.679160] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa2ad98-48b7-48fb-93fc-76c4622d7b0b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.691776] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dffdda-132d-435c-85fe-149a0c23cf2b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.699313] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.708891] env[61570]: DEBUG nova.compute.provider_tree [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.732828] env[61570]: DEBUG nova.scheduler.client.report [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.767534] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.768103] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 552.773332] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.072s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.773332] env[61570]: INFO nova.compute.claims [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.814994] env[61570]: DEBUG nova.compute.utils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.816774] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 552.816968] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 552.855139] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 552.945623] env[61570]: DEBUG nova.policy [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7522346ca5a549afae4b72a63021ae07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '075f06a8aabb4d1d8ec26c7b3f341791', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 552.949192] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 552.991526] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 552.991827] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 552.991981] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.992196] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 552.992317] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.992457] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 552.992661] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 552.993454] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 552.993454] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 552.993454] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 552.993454] env[61570]: DEBUG nova.virt.hardware [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 552.994997] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfa9d55-f8ca-4ddc-a998-8b38a316f6cc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.008782] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2005e02b-a619-44d3-b77e-8a6be9cb6581 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.061263] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400308e1-c3a1-4a85-a13f-33028ec0ad5c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.071691] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9236a192-dcc8-4b92-bc15-9962acfcb7d1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.120172] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc008695-848d-490a-bb86-f72c4100d585 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.129903] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891247, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.133412] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cb35b3-d670-409f-898e-7e891ce28a1e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.148757] env[61570]: DEBUG nova.compute.provider_tree [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.172499] env[61570]: DEBUG nova.scheduler.client.report [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 553.196669] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.425s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.197542] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 553.282661] env[61570]: DEBUG nova.compute.utils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.287243] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Allocating IP information in the background. 
{{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 553.288605] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 553.303752] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 553.418916] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 553.454186] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.454272] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.454363] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.454691] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.454691] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.456071] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa 
tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.456071] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.456071] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.456071] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.456071] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.456461] env[61570]: DEBUG nova.virt.hardware [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.456775] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1477f843-cf63-4f2d-91ca-cede4dc67110 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.465890] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ede758f-5e65-4e0e-9474-297d49365890 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.471408] env[61570]: DEBUG nova.policy [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76d0d91cb51a4c3aa913abe60281e9d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a90ae0f6cf7e45b9a7408bdd6317387e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 553.628811] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891247, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.701391] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Successfully created port: b7ae3b0d-6c3d-4abc-a165-8573959c64a7 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.130157] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891247, 'name': CreateVM_Task} progress is 99%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.364093] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Successfully updated port: f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 554.380078] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquiring lock "refresh_cache-8926a7b1-989f-4290-8828-1e75efbc0553" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.380235] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquired lock "refresh_cache-8926a7b1-989f-4290-8828-1e75efbc0553" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.380394] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.575609] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.634068] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891247, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.641124] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Successfully created port: 165d0a2f-d58d-4eda-9164-d5d209992fd7 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.762794] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.763132] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.763366] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 554.763526] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 554.793739] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.794371] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.794371] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.794371] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.794371] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.794667] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.794667] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 554.794667] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 554.795858] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.795858] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.795858] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.795858] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.796104] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.797023] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.797023] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 554.797023] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.814328] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.814328] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.814328] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.814328] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 554.816095] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f5cd2f-4145-4809-8ad8-f317fd1a4e97 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.828051] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60639fb3-6bdd-4f60-9ad7-a53a964ee245 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.844237] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6b0da2-d4d4-4f66-8b56-e3cb345a5e11 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.856023] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a665a1-7d43-4f40-a556-c462210a4d0e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.893480] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180608MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 554.893480] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.893868] 
env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.989932] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.990108] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8926a7b1-989f-4290-8828-1e75efbc0553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.990256] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance a95948a6-bad7-4cf0-902f-7c7bb4157d2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.990359] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 766812ee-e272-4779-b85d-7fdde876e877 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.990554] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20b016b6-a53d-4b1f-a7c7-539d1636091b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.990626] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 48b87f3a-879c-4578-90cf-3e4328299e81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 554.990743] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 555.031174] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "ded35886-716c-4725-8fc9-cd6dfc04281a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.031720] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.044553] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 555.044963] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 555.045091] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] stats={'failed_builds': '0', 'num_instances': '7', 'num_vm_building': '7', 'num_task_spawning': '7', 'num_os_type_None': '7', 'num_proj_2550f62320f14e59961625dff376b2ea': '1', 'io_workload': '7', 'num_proj_b65c16386e5e420d9e1ca6fc7aa13b49': '1', 'num_proj_23f18f56552247eea7220e1c2d866886': '1', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_075f06a8aabb4d1d8ec26c7b3f341791': '1', 'num_proj_a90ae0f6cf7e45b9a7408bdd6317387e': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 555.048997] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 555.111916] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.132985] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891247, 'name': CreateVM_Task, 'duration_secs': 2.363613} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.133314] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.133834] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.134014] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.134641] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 555.134899] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d27d5d13-a7f3-44e3-a667-f28f2b89596e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.140504] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 555.140504] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5277a80a-5760-e7f7-827f-269779ab91f2" [ 555.140504] env[61570]: _type = "Task" [ 555.140504] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.149923] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5277a80a-5760-e7f7-827f-269779ab91f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.210907] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a311d39-cb89-4012-a918-cc445dfabb88 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.221169] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa0b62d-2681-4d29-8d32-72dcbdfad6bc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.257082] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140ad3c1-e0a6-436a-baaa-7f44a86f925e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.266506] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1584a28f-06a0-4fe7-af49-b715a103ab16 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.283944] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.298918] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.323889] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 555.324137] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.430s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.324418] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.213s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.325888] env[61570]: INFO nova.compute.claims [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.541326] 
env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cc91b1-e3fe-4751-bc07-03d71be3d5a0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.555964] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bdc607a-3291-49ba-83d5-4a0b99c8c0f5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.595928] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Successfully updated port: b7ae3b0d-6c3d-4abc-a165-8573959c64a7 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.599561] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800c3e78-0470-456c-b1cd-f5cb59535f49 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.610480] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07adf28c-7b32-4f45-96d2-a9e9f9df23bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.615861] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "refresh_cache-48b87f3a-879c-4578-90cf-3e4328299e81" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.615861] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquired lock "refresh_cache-48b87f3a-879c-4578-90cf-3e4328299e81" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.616044] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.630062] env[61570]: DEBUG nova.compute.provider_tree [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.650032] env[61570]: DEBUG nova.scheduler.client.report [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.665822] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.666275] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.667837] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.676216] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.352s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.676666] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 555.735560] env[61570]: DEBUG nova.compute.utils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.738935] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Allocating IP information in the background. 
{{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 555.739079] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 555.755691] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 555.819093] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Successfully updated port: 03b18258-5fde-464a-8a20-d20940879e4a {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.828625] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "refresh_cache-766812ee-e272-4779-b85d-7fdde876e877" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.828766] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired lock "refresh_cache-766812ee-e272-4779-b85d-7fdde876e877" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.829726] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.853122] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 555.865273] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Updating instance_info_cache with network_info: [{"id": "f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00", "address": "fa:16:3e:22:bf:1e", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27ab8d1-17", "ovs_interfaceid": "f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.873918] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.890039] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.890039] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.890039] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.890397] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.890458] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.890665] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.890837] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 555.890988] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.891152] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 
tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.891316] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.891484] env[61570]: DEBUG nova.virt.hardware [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.893147] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1e5f76-c5f9-4fa8-86f9-1e4fa0147741 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.895955] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Releasing lock "refresh_cache-8926a7b1-989f-4290-8828-1e75efbc0553" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.896263] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Instance network_info: |[{"id": "f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00", "address": "fa:16:3e:22:bf:1e", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27ab8d1-17", "ovs_interfaceid": "f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 555.897048] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:bf:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.905831] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Creating folder: Project (b65c16386e5e420d9e1ca6fc7aa13b49). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.907030] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c73fab66-23be-4395-891c-2e13a24c267a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.913445] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e595f03a-1ebf-4e2b-abde-5596ef5e4349 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.930319] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Created folder: Project (b65c16386e5e420d9e1ca6fc7aa13b49) in parent group-v953072. [ 555.930969] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Creating folder: Instances. Parent ref: group-v953082. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.930969] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e53ccf3e-e38f-45be-aff5-9b4fc682b51c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.942984] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Created folder: Instances in parent group-v953082. [ 555.943268] env[61570]: DEBUG oslo.service.loopingcall [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.945337] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 555.945337] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d00a259c-7473-487e-b72c-3c0d316c6615 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.965308] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.965308] env[61570]: value = "task-4891250" [ 555.965308] env[61570]: _type = "Task" [ 555.965308] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.975196] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891250, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.028695] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.029442] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.039464] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.048184] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 556.126937] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.127315] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.128965] env[61570]: INFO nova.compute.claims [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.330773] env[61570]: DEBUG nova.policy [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f64b6978ae10491da72d3f00d9cf0496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41f716cba1d94cf28a341cc027112585', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 556.408453] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4537f133-6083-4cc8-b899-547acd7be6bb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.416793] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06465d1a-264d-4b37-8431-aa3ddf982bda {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.450179] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96c53a1-60c8-4aa1-af29-bd4a4426f0e4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.461501] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fd2a2b-df45-4b5d-a48d-806371ea7555 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.486134] env[61570]: DEBUG nova.compute.provider_tree [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.494017] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891250, 'name': 
CreateVM_Task, 'duration_secs': 0.381418} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.494017] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 556.494017] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.494017] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.494017] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.494701] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba34466-7144-4fdf-a004-426c12f1e358 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.500384] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Waiting for the task: (returnval){ [ 556.500384] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ac6046-d31e-b005-580b-f333d3f172a5" [ 556.500384] env[61570]: _type = "Task" [ 556.500384] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.505255] env[61570]: DEBUG nova.scheduler.client.report [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.514604] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ac6046-d31e-b005-580b-f333d3f172a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.515811] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Updating instance_info_cache with network_info: [{"id": "b7ae3b0d-6c3d-4abc-a165-8573959c64a7", "address": "fa:16:3e:d6:90:21", "network": {"id": "1c7fa94e-5a76-4123-a424-d00d7157fc06", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1853833901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "075f06a8aabb4d1d8ec26c7b3f341791", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50886eea-591a-452c-a27b-5f22cfc9df85", "external-id": "nsx-vlan-transportzone-578", "segmentation_id": 578, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ae3b0d-6c", "ovs_interfaceid": "b7ae3b0d-6c3d-4abc-a165-8573959c64a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.532811] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.532936] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: 
bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 556.537200] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Releasing lock "refresh_cache-48b87f3a-879c-4578-90cf-3e4328299e81" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.537365] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Instance network_info: |[{"id": "b7ae3b0d-6c3d-4abc-a165-8573959c64a7", "address": "fa:16:3e:d6:90:21", "network": {"id": "1c7fa94e-5a76-4123-a424-d00d7157fc06", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1853833901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "075f06a8aabb4d1d8ec26c7b3f341791", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50886eea-591a-452c-a27b-5f22cfc9df85", "external-id": "nsx-vlan-transportzone-578", "segmentation_id": 578, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ae3b0d-6c", "ovs_interfaceid": "b7ae3b0d-6c3d-4abc-a165-8573959c64a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 556.538358] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:90:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50886eea-591a-452c-a27b-5f22cfc9df85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7ae3b0d-6c3d-4abc-a165-8573959c64a7', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.547584] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Creating folder: Project (075f06a8aabb4d1d8ec26c7b3f341791). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.549039] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06f5f9dc-aca2-49d1-ab9f-88e3cd1493e9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.561058] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Created folder: Project (075f06a8aabb4d1d8ec26c7b3f341791) in parent group-v953072. [ 556.562193] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Creating folder: Instances. Parent ref: group-v953085. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 556.562193] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abf8ef64-dbe5-4509-821e-bb2d4bd0dd6f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.575176] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Created folder: Instances in parent group-v953085. [ 556.575176] env[61570]: DEBUG oslo.service.loopingcall [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.575176] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 556.575176] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0db1fc96-80f0-4acf-8e8d-f248cc613793 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.598365] env[61570]: DEBUG nova.compute.utils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 556.601026] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Allocating IP information in the background. 
{{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 556.601026] env[61570]: DEBUG nova.network.neutron [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 556.605711] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.605711] env[61570]: value = "task-4891253" [ 556.605711] env[61570]: _type = "Task" [ 556.605711] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.614939] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891253, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.618215] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 556.652306] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Successfully updated port: 165d0a2f-d58d-4eda-9164-d5d209992fd7 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 556.674571] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "refresh_cache-25e22032-2ee7-44df-ae6a-022b5bda9f2c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.675357] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquired lock "refresh_cache-25e22032-2ee7-44df-ae6a-022b5bda9f2c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.675662] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.729892] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 556.774809] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.775178] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.775220] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.775410] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.775559] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.775704] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.775911] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.776538] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.776743] env[61570]: DEBUG 
nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.776917] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.777972] env[61570]: DEBUG nova.virt.hardware [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.778090] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0866056c-d9da-45ea-9641-7218b4e056fc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.789535] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ad2190-5a43-4eac-84cf-9fb390b52d3a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.811188] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.817705] env[61570]: DEBUG nova.policy [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef978c0457c8405490288b21f0512af0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5beaa7046a57489b8b0ca03a79344d08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 556.898082] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "12435283-c350-4d85-be82-1c85e1ea17be" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.898335] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "12435283-c350-4d85-be82-1c85e1ea17be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.909526] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 556.975682] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.975682] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.977214] env[61570]: INFO nova.compute.claims [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 557.011457] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.011732] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.011953] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.032188] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Updating instance_info_cache with network_info: [{"id": "03b18258-5fde-464a-8a20-d20940879e4a", "address": "fa:16:3e:a9:6e:5a", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03b18258-5f", "ovs_interfaceid": "03b18258-5fde-464a-8a20-d20940879e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.056020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Releasing lock "refresh_cache-766812ee-e272-4779-b85d-7fdde876e877" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.056020] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Instance network_info: |[{"id": "03b18258-5fde-464a-8a20-d20940879e4a", "address": "fa:16:3e:a9:6e:5a", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03b18258-5f", "ovs_interfaceid": "03b18258-5fde-464a-8a20-d20940879e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 557.056278] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:6e:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03b18258-5fde-464a-8a20-d20940879e4a', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.061539] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating folder: Project (21713d88a2d5483f89ae59404d3aa235). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.062780] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86d6fcbf-8c78-46ef-8642-99457dc0e8fd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.075685] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Created folder: Project (21713d88a2d5483f89ae59404d3aa235) in parent group-v953072. [ 557.075919] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating folder: Instances. Parent ref: group-v953088. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.076176] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e30d74d-541d-43d1-ac90-a304ba107a4f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.096131] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Created folder: Instances in parent group-v953088. [ 557.096131] env[61570]: DEBUG oslo.service.loopingcall [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.099836] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 557.100788] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cfa4b87-7a22-4c9c-889e-316972eb2de8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.136026] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891253, 'name': CreateVM_Task} progress is 25%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.136026] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.136026] env[61570]: value = "task-4891256" [ 557.136026] env[61570]: _type = "Task" [ 557.136026] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.146516] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891256, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.307300] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9037474c-b4c6-4958-9195-eb4eda13db2f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.317542] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55173123-aae4-46cd-99c2-69ccc9664711 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.357461] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aceff0d-87fa-4814-9896-621f3152b4c7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.366184] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26d4418-22b5-4cff-aab2-d614641381fe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.381421] env[61570]: DEBUG nova.compute.provider_tree [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.400534] env[61570]: DEBUG nova.scheduler.client.report [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 557.408438] env[61570]: DEBUG nova.compute.manager [req-5069b773-bd3d-4003-b675-6c45443db838 req-3cf4c776-4212-4778-a98f-851ddcba4f68 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Received event network-vif-plugged-f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 557.408538] env[61570]: DEBUG oslo_concurrency.lockutils [req-5069b773-bd3d-4003-b675-6c45443db838 req-3cf4c776-4212-4778-a98f-851ddcba4f68 service nova] Acquiring lock "8926a7b1-989f-4290-8828-1e75efbc0553-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.408868] env[61570]: DEBUG oslo_concurrency.lockutils [req-5069b773-bd3d-4003-b675-6c45443db838 req-3cf4c776-4212-4778-a98f-851ddcba4f68 service nova] Lock "8926a7b1-989f-4290-8828-1e75efbc0553-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.408931] env[61570]: DEBUG 
oslo_concurrency.lockutils [req-5069b773-bd3d-4003-b675-6c45443db838 req-3cf4c776-4212-4778-a98f-851ddcba4f68 service nova] Lock "8926a7b1-989f-4290-8828-1e75efbc0553-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.409082] env[61570]: DEBUG nova.compute.manager [req-5069b773-bd3d-4003-b675-6c45443db838 req-3cf4c776-4212-4778-a98f-851ddcba4f68 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] No waiting events found dispatching network-vif-plugged-f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 557.409259] env[61570]: WARNING nova.compute.manager [req-5069b773-bd3d-4003-b675-6c45443db838 req-3cf4c776-4212-4778-a98f-851ddcba4f68 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Received unexpected event network-vif-plugged-f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00 for instance with vm_state building and task_state spawning. [ 557.426029] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.448s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.426029] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 557.482515] env[61570]: DEBUG nova.compute.utils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 557.484852] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 557.485034] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 557.508632] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 557.633317] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891253, 'name': CreateVM_Task, 'duration_secs': 0.632811} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.634288] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 557.636865] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 557.638832] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.638832] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.639387] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.640074] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18a41554-f9a0-4f26-9ad7-aedb119b3747 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.657230] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Waiting for the task: (returnval){ [ 557.657230] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52a519b7-11ea-625c-8837-77f16ed16d19" [ 557.657230] env[61570]: _type = "Task" [ 557.657230] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.657230] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891256, 'name': CreateVM_Task, 'duration_secs': 0.440318} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.657230] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 557.660801] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.663842] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Updating instance_info_cache with network_info: [{"id": "165d0a2f-d58d-4eda-9164-d5d209992fd7", "address": "fa:16:3e:2e:90:a7", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165d0a2f-d5", "ovs_interfaceid": "165d0a2f-d58d-4eda-9164-d5d209992fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.671404] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.671748] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.671958] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
557.672909] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.672909] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.673413] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4d0d3fb-3863-458e-8a4f-c106de04eb4a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.681168] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 557.681168] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 557.681168] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 557.681462] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 557.681462] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 557.681462] env[61570]: DEBUG nova.virt.hardware [None 
req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 557.681697] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 557.681881] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 557.683724] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 557.684209] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 557.684209] env[61570]: DEBUG nova.virt.hardware [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 557.686526] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c374c827-e403-4ff3-8136-d1660faf5d66 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.690955] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 557.690955] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52be2a1b-fc75-a525-113c-35397b0407d6" [ 557.690955] env[61570]: _type = "Task" [ 557.690955] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.700680] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Releasing lock "refresh_cache-25e22032-2ee7-44df-ae6a-022b5bda9f2c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.700680] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Instance network_info: |[{"id": "165d0a2f-d58d-4eda-9164-d5d209992fd7", "address": "fa:16:3e:2e:90:a7", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165d0a2f-d5", "ovs_interfaceid": "165d0a2f-d58d-4eda-9164-d5d209992fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 557.700925] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c504e6c-d8b0-4fa2-a11f-ae7def7a0eb2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.706154] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:90:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '165d0a2f-d58d-4eda-9164-d5d209992fd7', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.713050] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Creating folder: Project (a90ae0f6cf7e45b9a7408bdd6317387e). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.718021] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5a8ee3f-9708-460f-8d04-d7fc22e61bd0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.719662] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.719662] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.719878] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.733495] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Created folder: Project (a90ae0f6cf7e45b9a7408bdd6317387e) in parent group-v953072. [ 557.733495] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Creating folder: Instances. Parent ref: group-v953091. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 557.733495] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-200fee4f-2874-421f-8c9c-3a524cd55f34 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.744862] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Created folder: Instances in parent group-v953091. [ 557.744862] env[61570]: DEBUG oslo.service.loopingcall [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.745068] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 557.745273] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42390f92-627f-4b88-aba4-605772aa0f27 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.768715] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.768715] env[61570]: value = "task-4891259" [ 557.768715] env[61570]: _type = "Task" [ 557.768715] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.778517] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891259, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.027719] env[61570]: DEBUG nova.policy [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6aa4b2fd3a64460b01997c22a25163f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '779314e2630246b98c8b6a11c3f71890', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 558.118227] env[61570]: DEBUG nova.network.neutron [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Successfully created port: 366300e7-40d4-4c9b-966e-b0b720e659d9 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 558.132484] env[61570]: DEBUG nova.compute.manager [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Received event network-changed-0dcf2e37-5e8f-4650-9f10-0144263fd756 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 558.132892] env[61570]: DEBUG nova.compute.manager [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Refreshing instance network info cache due to event network-changed-0dcf2e37-5e8f-4650-9f10-0144263fd756. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 558.133147] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Acquiring lock "refresh_cache-24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.133738] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Acquired lock "refresh_cache-24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.133738] env[61570]: DEBUG nova.network.neutron [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Refreshing network info cache for port 0dcf2e37-5e8f-4650-9f10-0144263fd756 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.287723] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891259, 'name': CreateVM_Task, 'duration_secs': 0.38414} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.291898] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 558.292624] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.292831] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.294934] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.296047] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c051075-0a9c-4f16-b0a9-f6154d179878 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.304372] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Waiting for the task: (returnval){ [ 558.304372] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]528be5c8-4f9a-34f3-9c14-39d8aeaa12c5" [ 558.304372] env[61570]: _type = "Task" [ 558.304372] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.318180] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.318500] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.318734] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.350013] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Successfully created port: f27cc970-f7c7-4c38-9f86-4c2e1774f359 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 559.416196] env[61570]: DEBUG nova.network.neutron [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Updated VIF entry in instance network info cache for port 0dcf2e37-5e8f-4650-9f10-0144263fd756. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 559.416656] env[61570]: DEBUG nova.network.neutron [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Updating instance_info_cache with network_info: [{"id": "0dcf2e37-5e8f-4650-9f10-0144263fd756", "address": "fa:16:3e:d4:e2:ce", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0dcf2e37-5e", "ovs_interfaceid": "0dcf2e37-5e8f-4650-9f10-0144263fd756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.432725] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Releasing lock "refresh_cache-24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.433197] env[61570]: DEBUG nova.compute.manager [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Received event network-vif-plugged-dda317d0-cb04-4e2a-8e4a-ac825d4807ec {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 559.433197] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Acquiring lock "20b016b6-a53d-4b1f-a7c7-539d1636091b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.433364] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Lock "20b016b6-a53d-4b1f-a7c7-539d1636091b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.433525] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Lock "20b016b6-a53d-4b1f-a7c7-539d1636091b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.433686] env[61570]: DEBUG nova.compute.manager 
[req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] No waiting events found dispatching network-vif-plugged-dda317d0-cb04-4e2a-8e4a-ac825d4807ec {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 559.433853] env[61570]: WARNING nova.compute.manager [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Received unexpected event network-vif-plugged-dda317d0-cb04-4e2a-8e4a-ac825d4807ec for instance with vm_state building and task_state spawning. [ 559.434018] env[61570]: DEBUG nova.compute.manager [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Received event network-changed-dda317d0-cb04-4e2a-8e4a-ac825d4807ec {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 559.434178] env[61570]: DEBUG nova.compute.manager [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Refreshing instance network info cache due to event network-changed-dda317d0-cb04-4e2a-8e4a-ac825d4807ec. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 559.434358] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Acquiring lock "refresh_cache-20b016b6-a53d-4b1f-a7c7-539d1636091b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.434496] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Acquired lock "refresh_cache-20b016b6-a53d-4b1f-a7c7-539d1636091b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.434713] env[61570]: DEBUG nova.network.neutron [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Refreshing network info cache for port dda317d0-cb04-4e2a-8e4a-ac825d4807ec {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 560.088954] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Successfully created port: 58a8fe87-5571-495b-ad70-3487fa9cff54 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.728478] env[61570]: DEBUG nova.network.neutron [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Updated VIF entry in instance network info cache for port dda317d0-cb04-4e2a-8e4a-ac825d4807ec. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 560.728758] env[61570]: DEBUG nova.network.neutron [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Updating instance_info_cache with network_info: [{"id": "dda317d0-cb04-4e2a-8e4a-ac825d4807ec", "address": "fa:16:3e:29:bb:c4", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda317d0-cb", "ovs_interfaceid": "dda317d0-cb04-4e2a-8e4a-ac825d4807ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.758791] env[61570]: DEBUG oslo_concurrency.lockutils [req-d825328d-8af6-4c5f-9ad2-9334adad8ac8 req-0a385af2-d38e-479c-8c51-c49681e8aaf5 service nova] Releasing lock "refresh_cache-20b016b6-a53d-4b1f-a7c7-539d1636091b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.915175] env[61570]: DEBUG nova.network.neutron [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Successfully updated port: 366300e7-40d4-4c9b-966e-b0b720e659d9 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 560.935710] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "refresh_cache-bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.937278] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquired lock "refresh_cache-bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.938792] env[61570]: DEBUG nova.network.neutron [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.020437] env[61570]: DEBUG nova.network.neutron [None 
req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.281164] env[61570]: DEBUG nova.compute.manager [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Received event network-changed-f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 561.281459] env[61570]: DEBUG nova.compute.manager [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Refreshing instance network info cache due to event network-changed-f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 561.281587] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Acquiring lock "refresh_cache-8926a7b1-989f-4290-8828-1e75efbc0553" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.281725] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Acquired lock "refresh_cache-8926a7b1-989f-4290-8828-1e75efbc0553" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.282188] env[61570]: DEBUG nova.network.neutron [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Refreshing network info cache for port f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 561.371296] env[61570]: DEBUG nova.network.neutron [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Updating instance_info_cache with network_info: [{"id": "366300e7-40d4-4c9b-966e-b0b720e659d9", "address": "fa:16:3e:e5:34:02", "network": {"id": "3ad6375b-f10c-42b1-af6b-08c23aec55ad", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1456412240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5beaa7046a57489b8b0ca03a79344d08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap366300e7-40", "ovs_interfaceid": "366300e7-40d4-4c9b-966e-b0b720e659d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.396036] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Releasing lock "refresh_cache-bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.396036] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Instance network_info: |[{"id": "366300e7-40d4-4c9b-966e-b0b720e659d9", "address": "fa:16:3e:e5:34:02", "network": {"id": "3ad6375b-f10c-42b1-af6b-08c23aec55ad", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1456412240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5beaa7046a57489b8b0ca03a79344d08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap366300e7-40", "ovs_interfaceid": "366300e7-40d4-4c9b-966e-b0b720e659d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 561.396360] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:34:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '366300e7-40d4-4c9b-966e-b0b720e659d9', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 561.408185] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Creating folder: Project (5beaa7046a57489b8b0ca03a79344d08). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 561.408860] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-350096d1-f235-4516-90dc-553444d830d5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.426785] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Created folder: Project (5beaa7046a57489b8b0ca03a79344d08) in parent group-v953072. [ 561.427065] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Creating folder: Instances. Parent ref: group-v953094. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 561.432010] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bca8651-f610-4f3f-857d-5993d7c30767 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.444682] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Created folder: Instances in parent group-v953094. [ 561.445161] env[61570]: DEBUG oslo.service.loopingcall [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 561.446044] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 561.446044] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b1bb6e0-0415-4ae1-a917-2d222a270401 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.472798] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 561.472798] env[61570]: value = "task-4891262" [ 561.472798] env[61570]: _type = "Task" [ 561.472798] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.488392] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891262, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.654241] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Successfully updated port: f27cc970-f7c7-4c38-9f86-4c2e1774f359 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 561.673669] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "refresh_cache-ded35886-716c-4725-8fc9-cd6dfc04281a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.673669] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired lock "refresh_cache-ded35886-716c-4725-8fc9-cd6dfc04281a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.673892] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.765931] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.985948] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891262, 'name': CreateVM_Task, 'duration_secs': 0.376792} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.986746] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 561.987991] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.989628] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.990279] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 561.995591] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-092edea2-dc5a-4fe1-9446-a2204b53e3a8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.003668] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Waiting for the task: (returnval){ [ 562.003668] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52be28a8-a5d4-c30c-77e5-ac260add9e0c" [ 562.003668] env[61570]: _type = "Task" [ 562.003668] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.022492] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52be28a8-a5d4-c30c-77e5-ac260add9e0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.051714] env[61570]: DEBUG nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Received event network-vif-plugged-b7ae3b0d-6c3d-4abc-a165-8573959c64a7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.052754] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Acquiring lock "48b87f3a-879c-4578-90cf-3e4328299e81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.053468] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Lock "48b87f3a-879c-4578-90cf-3e4328299e81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.053468] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Lock "48b87f3a-879c-4578-90cf-3e4328299e81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.053468] env[61570]: DEBUG nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] No waiting events found dispatching network-vif-plugged-b7ae3b0d-6c3d-4abc-a165-8573959c64a7 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 562.053660] env[61570]: WARNING nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Received unexpected event network-vif-plugged-b7ae3b0d-6c3d-4abc-a165-8573959c64a7 for instance with vm_state building and task_state spawning. [ 562.054252] env[61570]: DEBUG nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Received event network-changed-b7ae3b0d-6c3d-4abc-a165-8573959c64a7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.054252] env[61570]: DEBUG nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Refreshing instance network info cache due to event network-changed-b7ae3b0d-6c3d-4abc-a165-8573959c64a7. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 562.054478] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Acquiring lock "refresh_cache-48b87f3a-879c-4578-90cf-3e4328299e81" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.055491] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Acquired lock "refresh_cache-48b87f3a-879c-4578-90cf-3e4328299e81" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.056295] env[61570]: DEBUG nova.network.neutron [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Refreshing network info cache for port b7ae3b0d-6c3d-4abc-a165-8573959c64a7 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.250107] env[61570]: DEBUG nova.network.neutron [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Updated VIF entry in instance network info cache for port f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 562.250935] env[61570]: DEBUG nova.network.neutron [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Updating instance_info_cache with network_info: [{"id": "f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00", "address": "fa:16:3e:22:bf:1e", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27ab8d1-17", "ovs_interfaceid": "f27ab8d1-175d-46e9-9bd6-f95ba2d7ce00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.266781] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Releasing lock "refresh_cache-8926a7b1-989f-4290-8828-1e75efbc0553" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.267171] env[61570]: DEBUG nova.compute.manager [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Received event 
network-vif-plugged-03b18258-5fde-464a-8a20-d20940879e4a {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.267446] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Acquiring lock "766812ee-e272-4779-b85d-7fdde876e877-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.267724] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Lock "766812ee-e272-4779-b85d-7fdde876e877-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.267950] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Lock "766812ee-e272-4779-b85d-7fdde876e877-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.268200] env[61570]: DEBUG nova.compute.manager [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] No waiting events found dispatching network-vif-plugged-03b18258-5fde-464a-8a20-d20940879e4a {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 562.268426] env[61570]: WARNING nova.compute.manager [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Received unexpected event network-vif-plugged-03b18258-5fde-464a-8a20-d20940879e4a for instance with vm_state building and task_state spawning. [ 562.268702] env[61570]: DEBUG nova.compute.manager [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Received event network-changed-03b18258-5fde-464a-8a20-d20940879e4a {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 562.268874] env[61570]: DEBUG nova.compute.manager [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Refreshing instance network info cache due to event network-changed-03b18258-5fde-464a-8a20-d20940879e4a. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 562.269790] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Acquiring lock "refresh_cache-766812ee-e272-4779-b85d-7fdde876e877" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.270175] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Acquired lock "refresh_cache-766812ee-e272-4779-b85d-7fdde876e877" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.270545] env[61570]: DEBUG nova.network.neutron [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Refreshing network info cache for port 03b18258-5fde-464a-8a20-d20940879e4a {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 562.519837] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.520105] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 562.520321] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.521706] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Updating instance_info_cache with network_info: [{"id": "f27cc970-f7c7-4c38-9f86-4c2e1774f359", "address": "fa:16:3e:c0:f1:77", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapf27cc970-f7", "ovs_interfaceid": "f27cc970-f7c7-4c38-9f86-4c2e1774f359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.537129] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Releasing lock "refresh_cache-ded35886-716c-4725-8fc9-cd6dfc04281a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.537662] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Instance network_info: |[{"id": "f27cc970-f7c7-4c38-9f86-4c2e1774f359", "address": "fa:16:3e:c0:f1:77", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27cc970-f7", "ovs_interfaceid": "f27cc970-f7c7-4c38-9f86-4c2e1774f359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 562.539148] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:f1:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f27cc970-f7c7-4c38-9f86-4c2e1774f359', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 562.548528] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Creating folder: Project (41f716cba1d94cf28a341cc027112585). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 562.549462] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f999c38-7893-4cb0-abc8-0fec3b561b00 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.564229] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Created folder: Project (41f716cba1d94cf28a341cc027112585) in parent group-v953072. [ 562.564443] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Creating folder: Instances. Parent ref: group-v953097. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 562.564678] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-875ef670-6b4c-489a-a8c3-6d15cb0e7731 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.575211] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Created folder: Instances in parent group-v953097. [ 562.575469] env[61570]: DEBUG oslo.service.loopingcall [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.576162] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 562.576330] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2eb658b-438d-4837-894c-ac053f0ebcc0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.608413] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 562.608413] env[61570]: value = "task-4891265" [ 562.608413] env[61570]: _type = "Task" [ 562.608413] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.622469] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891265, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.003088] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Successfully updated port: 58a8fe87-5571-495b-ad70-3487fa9cff54 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 563.019715] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "refresh_cache-12435283-c350-4d85-be82-1c85e1ea17be" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.019815] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquired lock "refresh_cache-12435283-c350-4d85-be82-1c85e1ea17be" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.019941] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 563.123192] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891265, 'name': CreateVM_Task} progress is 25%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.168700] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.358466] env[61570]: DEBUG nova.network.neutron [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Updated VIF entry in instance network info cache for port b7ae3b0d-6c3d-4abc-a165-8573959c64a7. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.358847] env[61570]: DEBUG nova.network.neutron [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Updating instance_info_cache with network_info: [{"id": "b7ae3b0d-6c3d-4abc-a165-8573959c64a7", "address": "fa:16:3e:d6:90:21", "network": {"id": "1c7fa94e-5a76-4123-a424-d00d7157fc06", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1853833901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "075f06a8aabb4d1d8ec26c7b3f341791", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50886eea-591a-452c-a27b-5f22cfc9df85", "external-id": "nsx-vlan-transportzone-578", "segmentation_id": 578, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ae3b0d-6c", "ovs_interfaceid": "b7ae3b0d-6c3d-4abc-a165-8573959c64a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.376470] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Releasing lock "refresh_cache-48b87f3a-879c-4578-90cf-3e4328299e81" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.376470] env[61570]: DEBUG nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Received event network-vif-plugged-165d0a2f-d58d-4eda-9164-d5d209992fd7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 563.376470] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Acquiring lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.376470] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.376722] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.376722] env[61570]: DEBUG 
nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] No waiting events found dispatching network-vif-plugged-165d0a2f-d58d-4eda-9164-d5d209992fd7 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 563.376722] env[61570]: WARNING nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Received unexpected event network-vif-plugged-165d0a2f-d58d-4eda-9164-d5d209992fd7 for instance with vm_state building and task_state spawning. [ 563.376722] env[61570]: DEBUG nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Received event network-changed-165d0a2f-d58d-4eda-9164-d5d209992fd7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 563.376841] env[61570]: DEBUG nova.compute.manager [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Refreshing instance network info cache due to event network-changed-165d0a2f-d58d-4eda-9164-d5d209992fd7. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 563.376841] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Acquiring lock "refresh_cache-25e22032-2ee7-44df-ae6a-022b5bda9f2c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.377194] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Acquired lock "refresh_cache-25e22032-2ee7-44df-ae6a-022b5bda9f2c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.377194] env[61570]: DEBUG nova.network.neutron [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Refreshing network info cache for port 165d0a2f-d58d-4eda-9164-d5d209992fd7 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 563.623601] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891265, 'name': CreateVM_Task, 'duration_secs': 0.717671} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.624613] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 563.624613] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.625114] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.628483] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 563.628483] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e929b5a6-445b-44ec-99d2-4d1c11236279 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.635069] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for the task: (returnval){ [ 563.635069] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]525f6831-842d-0bed-7ca5-dc4c823eb2a1" [ 563.635069] env[61570]: _type = "Task" [ 563.635069] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.647069] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]525f6831-842d-0bed-7ca5-dc4c823eb2a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.679137] env[61570]: DEBUG nova.network.neutron [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Updated VIF entry in instance network info cache for port 03b18258-5fde-464a-8a20-d20940879e4a. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 563.679137] env[61570]: DEBUG nova.network.neutron [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Updating instance_info_cache with network_info: [{"id": "03b18258-5fde-464a-8a20-d20940879e4a", "address": "fa:16:3e:a9:6e:5a", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03b18258-5f", "ovs_interfaceid": "03b18258-5fde-464a-8a20-d20940879e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.693459] env[61570]: DEBUG oslo_concurrency.lockutils [req-6050d09d-c77e-4d09-ae67-f7aec6b09593 req-fe4342fa-26a6-492b-9e56-6be1a3a492e9 service nova] Releasing lock "refresh_cache-766812ee-e272-4779-b85d-7fdde876e877" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.980125] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Updating instance_info_cache with network_info: [{"id": "58a8fe87-5571-495b-ad70-3487fa9cff54", "address": "fa:16:3e:6f:9d:df", "network": {"id": "b08d4650-723a-4574-80bb-203889988cba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2017569618-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "779314e2630246b98c8b6a11c3f71890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a8fe87-55", "ovs_interfaceid": "58a8fe87-5571-495b-ad70-3487fa9cff54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.000696] env[61570]: DEBUG oslo_concurrency.lockutils 
[None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Releasing lock "refresh_cache-12435283-c350-4d85-be82-1c85e1ea17be" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.001014] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Instance network_info: |[{"id": "58a8fe87-5571-495b-ad70-3487fa9cff54", "address": "fa:16:3e:6f:9d:df", "network": {"id": "b08d4650-723a-4574-80bb-203889988cba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2017569618-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "779314e2630246b98c8b6a11c3f71890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a8fe87-55", "ovs_interfaceid": "58a8fe87-5571-495b-ad70-3487fa9cff54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 564.002018] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:9d:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8059554c-499f-44b4-be06-29f80ec36b34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58a8fe87-5571-495b-ad70-3487fa9cff54', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 564.009724] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Creating folder: Project (779314e2630246b98c8b6a11c3f71890). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 564.010541] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fea51100-87c4-4a68-a785-136eec3eadcd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.024808] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Created folder: Project (779314e2630246b98c8b6a11c3f71890) in parent group-v953072. 
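The folder-creation and CreateVM_Task records around this point, together with the "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets on the devstack-image-cache_base path, reflect two recurring patterns in this log: vSphere calls issued through the driver's oslo.vmware session and then polled to completion, and critical sections guarded by oslo.concurrency locks. The following is a minimal Python sketch of both patterns, not the actual Nova code; folder_ref, config_spec, respool_ref, image_id and the lock name are placeholders, and session is assumed to be the driver's existing oslo_vmware.api.VMwareAPISession.

from oslo_concurrency import lockutils


def create_vm(session, folder_ref, config_spec, respool_ref):
    # Issue Folder.CreateVM_Task through the existing vCenter session; the
    # call returns a Task managed-object reference immediately (the
    # "Invoking Folder.CreateVM_Task" records in the log come from a step
    # like this one).
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    # Block until vCenter reports the task finished; the repeated
    # "Task: {... 'name': CreateVM_Task} progress is ..." records are the
    # polling loop behind this call.
    task_info = session.wait_for_task(task)
    return task_info.result  # managed-object reference of the new VM


def with_image_cache_lock(image_id, fn):
    # The acquire/release pairs around the image cache correspond to a
    # lockutils context manager such as this; the lock name here is
    # illustrative only.
    with lockutils.lock('[datastore2] devstack-image-cache_base/%s' % image_id):
        return fn()

Under that reading, the SearchDatastore_Task invocations that follow the VM creation are the image-cache lookup performed while the corresponding cache lock is held, before it is released and the image is processed.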
[ 564.026141] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Creating folder: Instances. Parent ref: group-v953100. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 564.026141] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9bf87c5-320e-4617-830c-ecc48fa0b6d6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.038789] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Created folder: Instances in parent group-v953100. [ 564.039481] env[61570]: DEBUG oslo.service.loopingcall [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 564.039755] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 564.039991] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c4a8c45-8c5b-4d1f-99a4-7856373fa17d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.072922] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 564.072922] env[61570]: value = "task-4891268" [ 564.072922] env[61570]: _type = "Task" [ 564.072922] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.087804] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891268, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.150294] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.150684] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 564.150949] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.482939] env[61570]: DEBUG nova.network.neutron [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Updated VIF entry in instance network info cache for port 165d0a2f-d58d-4eda-9164-d5d209992fd7. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 564.483582] env[61570]: DEBUG nova.network.neutron [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Updating instance_info_cache with network_info: [{"id": "165d0a2f-d58d-4eda-9164-d5d209992fd7", "address": "fa:16:3e:2e:90:a7", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165d0a2f-d5", "ovs_interfaceid": "165d0a2f-d58d-4eda-9164-d5d209992fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.501552] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bba68f1-9a64-4bbe-9e54-ff407463133e req-68a6d540-dea6-4583-b53f-390e5dae8887 service nova] Releasing lock "refresh_cache-25e22032-2ee7-44df-ae6a-022b5bda9f2c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.588756] env[61570]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-4891268, 'name': CreateVM_Task, 'duration_secs': 0.35703} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.589104] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 564.591553] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.591553] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.591553] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 564.591553] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9298b66e-1b2e-4501-b631-f43971a88cfc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.600173] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Waiting for the task: (returnval){ [ 564.600173] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52187730-7b34-159c-b703-7eb1bc7f4f69" [ 564.600173] env[61570]: _type = "Task" [ 564.600173] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.610450] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52187730-7b34-159c-b703-7eb1bc7f4f69, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.110081] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.110081] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 565.110081] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.291727] env[61570]: DEBUG nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Received event network-vif-plugged-f27cc970-f7c7-4c38-9f86-4c2e1774f359 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 565.292009] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Acquiring lock "ded35886-716c-4725-8fc9-cd6dfc04281a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.292602] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.292864] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.293151] env[61570]: DEBUG nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] No waiting events found dispatching network-vif-plugged-f27cc970-f7c7-4c38-9f86-4c2e1774f359 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 565.293371] env[61570]: WARNING nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Received unexpected event 
network-vif-plugged-f27cc970-f7c7-4c38-9f86-4c2e1774f359 for instance with vm_state building and task_state spawning. [ 565.293621] env[61570]: DEBUG nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Received event network-changed-f27cc970-f7c7-4c38-9f86-4c2e1774f359 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 565.294239] env[61570]: DEBUG nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Refreshing instance network info cache due to event network-changed-f27cc970-f7c7-4c38-9f86-4c2e1774f359. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 565.294239] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Acquiring lock "refresh_cache-ded35886-716c-4725-8fc9-cd6dfc04281a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.294383] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Acquired lock "refresh_cache-ded35886-716c-4725-8fc9-cd6dfc04281a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.294679] env[61570]: DEBUG nova.network.neutron [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Refreshing network info cache for port f27cc970-f7c7-4c38-9f86-4c2e1774f359 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 566.849091] env[61570]: DEBUG nova.compute.manager [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Received event network-vif-plugged-366300e7-40d4-4c9b-966e-b0b720e659d9 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 566.849091] env[61570]: DEBUG oslo_concurrency.lockutils [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] Acquiring lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.849091] env[61570]: DEBUG oslo_concurrency.lockutils [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.849091] env[61570]: DEBUG oslo_concurrency.lockutils [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.849496] env[61570]: DEBUG nova.compute.manager [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 
req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] No waiting events found dispatching network-vif-plugged-366300e7-40d4-4c9b-966e-b0b720e659d9 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 566.849772] env[61570]: WARNING nova.compute.manager [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Received unexpected event network-vif-plugged-366300e7-40d4-4c9b-966e-b0b720e659d9 for instance with vm_state building and task_state spawning. [ 566.849772] env[61570]: DEBUG nova.compute.manager [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Received event network-changed-366300e7-40d4-4c9b-966e-b0b720e659d9 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 566.849879] env[61570]: DEBUG nova.compute.manager [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Refreshing instance network info cache due to event network-changed-366300e7-40d4-4c9b-966e-b0b720e659d9. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 566.850290] env[61570]: DEBUG oslo_concurrency.lockutils [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] Acquiring lock "refresh_cache-bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.850290] env[61570]: DEBUG oslo_concurrency.lockutils [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] Acquired lock "refresh_cache-bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.852044] env[61570]: DEBUG nova.network.neutron [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Refreshing network info cache for port 366300e7-40d4-4c9b-966e-b0b720e659d9 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 566.861158] env[61570]: DEBUG nova.network.neutron [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Updated VIF entry in instance network info cache for port f27cc970-f7c7-4c38-9f86-4c2e1774f359. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 566.861513] env[61570]: DEBUG nova.network.neutron [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Updating instance_info_cache with network_info: [{"id": "f27cc970-f7c7-4c38-9f86-4c2e1774f359", "address": "fa:16:3e:c0:f1:77", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf27cc970-f7", "ovs_interfaceid": "f27cc970-f7c7-4c38-9f86-4c2e1774f359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.878794] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Releasing lock "refresh_cache-ded35886-716c-4725-8fc9-cd6dfc04281a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.878794] env[61570]: DEBUG nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Received event network-vif-plugged-58a8fe87-5571-495b-ad70-3487fa9cff54 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 566.879094] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Acquiring lock "12435283-c350-4d85-be82-1c85e1ea17be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.880034] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Lock "12435283-c350-4d85-be82-1c85e1ea17be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.880034] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Lock "12435283-c350-4d85-be82-1c85e1ea17be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.880034] env[61570]: DEBUG nova.compute.manager 
[req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] No waiting events found dispatching network-vif-plugged-58a8fe87-5571-495b-ad70-3487fa9cff54 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 566.880034] env[61570]: WARNING nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Received unexpected event network-vif-plugged-58a8fe87-5571-495b-ad70-3487fa9cff54 for instance with vm_state building and task_state spawning. [ 566.880303] env[61570]: DEBUG nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Received event network-changed-58a8fe87-5571-495b-ad70-3487fa9cff54 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 566.880303] env[61570]: DEBUG nova.compute.manager [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Refreshing instance network info cache due to event network-changed-58a8fe87-5571-495b-ad70-3487fa9cff54. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 566.880303] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Acquiring lock "refresh_cache-12435283-c350-4d85-be82-1c85e1ea17be" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.880450] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Acquired lock "refresh_cache-12435283-c350-4d85-be82-1c85e1ea17be" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.880520] env[61570]: DEBUG nova.network.neutron [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Refreshing network info cache for port 58a8fe87-5571-495b-ad70-3487fa9cff54 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 567.904702] env[61570]: DEBUG nova.network.neutron [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Updated VIF entry in instance network info cache for port 366300e7-40d4-4c9b-966e-b0b720e659d9. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 567.906486] env[61570]: DEBUG nova.network.neutron [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Updating instance_info_cache with network_info: [{"id": "366300e7-40d4-4c9b-966e-b0b720e659d9", "address": "fa:16:3e:e5:34:02", "network": {"id": "3ad6375b-f10c-42b1-af6b-08c23aec55ad", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1456412240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5beaa7046a57489b8b0ca03a79344d08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap366300e7-40", "ovs_interfaceid": "366300e7-40d4-4c9b-966e-b0b720e659d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.921920] env[61570]: DEBUG oslo_concurrency.lockutils [req-b342a14f-443e-4923-85e6-8c8ac0ce4c34 req-6ee14a7f-ea1d-4175-a0f8-0a568413438f service nova] Releasing lock "refresh_cache-bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.073361] env[61570]: DEBUG nova.network.neutron [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Updated VIF entry in instance network info cache for port 58a8fe87-5571-495b-ad70-3487fa9cff54. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 568.073706] env[61570]: DEBUG nova.network.neutron [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Updating instance_info_cache with network_info: [{"id": "58a8fe87-5571-495b-ad70-3487fa9cff54", "address": "fa:16:3e:6f:9d:df", "network": {"id": "b08d4650-723a-4574-80bb-203889988cba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2017569618-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "779314e2630246b98c8b6a11c3f71890", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a8fe87-55", "ovs_interfaceid": "58a8fe87-5571-495b-ad70-3487fa9cff54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.095379] env[61570]: DEBUG oslo_concurrency.lockutils [req-e488eadf-aead-4f75-8a20-724705f6208a req-53233be8-a7fb-4baf-ad27-53b18151a548 service nova] Releasing lock "refresh_cache-12435283-c350-4d85-be82-1c85e1ea17be" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.512088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.512375] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.400700] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.402038] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock 
"aa2e5125-24fb-4476-a585-df838c8cf4d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.330499] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "0a37f623-f757-4f67-a796-a8e17cfb9496" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.330914] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.873967] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.874238] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.181351] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bba7f229-b350-4365-aa2b-40e5bd4d8ac1 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Acquiring lock "85acb509-fb8a-4f23-90a9-de4fb12fd5dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.181802] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bba7f229-b350-4365-aa2b-40e5bd4d8ac1 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Lock "85acb509-fb8a-4f23-90a9-de4fb12fd5dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.588367] env[61570]: DEBUG oslo_concurrency.lockutils [None req-254e8122-3278-48f1-8d1c-c16e4c21d0c5 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] Acquiring lock "cec56dfe-ec77-4824-8751-43f85b57c6d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.588653] env[61570]: DEBUG oslo_concurrency.lockutils [None req-254e8122-3278-48f1-8d1c-c16e4c21d0c5 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] Lock "cec56dfe-ec77-4824-8751-43f85b57c6d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.628203] env[61570]: DEBUG oslo_concurrency.lockutils [None req-34d8d5b4-e887-4d72-a038-cdf2af5118e7 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Acquiring lock "771b52c2-234d-47ad-af34-11cf0d68f5e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.628726] env[61570]: DEBUG oslo_concurrency.lockutils [None req-34d8d5b4-e887-4d72-a038-cdf2af5118e7 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Lock "771b52c2-234d-47ad-af34-11cf0d68f5e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.846031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f18f0c08-1ebb-4287-9104-72ac1342a1a8 tempest-ServersWithSpecificFlavorTestJSON-198724013 tempest-ServersWithSpecificFlavorTestJSON-198724013-project-member] Acquiring lock "0a39a01c-4e3f-4031-98a2-2a12c492a2ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.846353] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f18f0c08-1ebb-4287-9104-72ac1342a1a8 tempest-ServersWithSpecificFlavorTestJSON-198724013 tempest-ServersWithSpecificFlavorTestJSON-198724013-project-member] Lock "0a39a01c-4e3f-4031-98a2-2a12c492a2ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.328114] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a551d30d-e931-460b-ae62-c9d0a8c0f43e tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Acquiring lock "76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.328402] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a551d30d-e931-460b-ae62-c9d0a8c0f43e tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Lock "76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.377018] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-c5bf6c36-1fd3-46f3-aea4-e42bdfd7d749 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] Acquiring lock "7a5f2f6a-db7f-410d-96cf-376be4ef6dc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.377018] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c5bf6c36-1fd3-46f3-aea4-e42bdfd7d749 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] Lock "7a5f2f6a-db7f-410d-96cf-376be4ef6dc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.649693] env[61570]: DEBUG oslo_concurrency.lockutils [None req-91511a3d-5e93-49cb-881f-ecf7b421fa59 tempest-ServerDiagnosticsV248Test-1075744459 tempest-ServerDiagnosticsV248Test-1075744459-project-member] Acquiring lock "63eeeab2-6aa3-49c9-b76b-09cc81f8d269" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.650096] env[61570]: DEBUG oslo_concurrency.lockutils [None req-91511a3d-5e93-49cb-881f-ecf7b421fa59 tempest-ServerDiagnosticsV248Test-1075744459 tempest-ServerDiagnosticsV248Test-1075744459-project-member] Lock "63eeeab2-6aa3-49c9-b76b-09cc81f8d269" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.370937] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bfa8afda-590c-4c92-ae7f-633518142711 tempest-ServerExternalEventsTest-554257046 tempest-ServerExternalEventsTest-554257046-project-member] Acquiring lock "8962f2e6-007f-47be-8c56-bb33c8354287" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.371192] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bfa8afda-590c-4c92-ae7f-633518142711 tempest-ServerExternalEventsTest-554257046 tempest-ServerExternalEventsTest-554257046-project-member] Lock "8962f2e6-007f-47be-8c56-bb33c8354287" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.644279] env[61570]: DEBUG oslo_concurrency.lockutils [None req-54a4f1c4-b46a-4f4b-b842-5a00b3348184 tempest-ServerActionsTestOtherA-2122794491 tempest-ServerActionsTestOtherA-2122794491-project-member] Acquiring lock "9f25c9d9-4936-4773-8fc3-bf52648752be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.644581] env[61570]: DEBUG oslo_concurrency.lockutils [None req-54a4f1c4-b46a-4f4b-b842-5a00b3348184 tempest-ServerActionsTestOtherA-2122794491 tempest-ServerActionsTestOtherA-2122794491-project-member] Lock "9f25c9d9-4936-4773-8fc3-bf52648752be" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.133608] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ccf76edd-2927-4ff4-80bc-5750b5ba77ee tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.133912] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ccf76edd-2927-4ff4-80bc-5750b5ba77ee tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.448127] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Acquiring lock "243873bb-c6d0-4212-8bc6-5512044b9025" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.448127] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Lock "243873bb-c6d0-4212-8bc6-5512044b9025" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.937711] env[61570]: WARNING oslo_vmware.rw_handles [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 598.937711] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 598.937711] 
env[61570]: ERROR oslo_vmware.rw_handles [ 598.938581] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 598.940790] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 598.940790] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Copying Virtual Disk [datastore2] vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/6e59e766-f29f-442f-8a1b-af66f7c35310/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 598.943085] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f32dfb5-26f7-42bc-8db0-34e2f6bdb25a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.951908] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Waiting for the task: (returnval){ [ 598.951908] env[61570]: value = "task-4891280" [ 598.951908] env[61570]: _type = "Task" [ 598.951908] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.963377] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Task: {'id': task-4891280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.470549] env[61570]: DEBUG oslo_vmware.exceptions [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 599.471045] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.475124] env[61570]: ERROR nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 599.475124] env[61570]: Faults: ['InvalidArgument'] [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Traceback (most recent call last): [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] yield resources [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self.driver.spawn(context, instance, image_meta, [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self._fetch_image_if_missing(context, vi) [ 599.475124] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] image_cache(vi, tmp_image_ds_loc) [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] vm_util.copy_virtual_disk( [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] session._wait_for_task(vmdk_copy_task) [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] return self.wait_for_task(task_ref) [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] return evt.wait() [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] result = hub.switch() [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 599.475486] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] return self.greenlet.switch() [ 599.475867] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 599.475867] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self.f(*self.args, **self.kw) [ 599.475867] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 599.475867] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] raise exceptions.translate_fault(task_info.error) [ 599.475867] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 599.475867] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Faults: ['InvalidArgument'] [ 599.475867] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] [ 599.475867] env[61570]: INFO nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Terminating instance [ 599.477983] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquiring lock "refresh_cache-a95948a6-bad7-4cf0-902f-7c7bb4157d2b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.478559] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquired lock "refresh_cache-a95948a6-bad7-4cf0-902f-7c7bb4157d2b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.478559] env[61570]: DEBUG nova.network.neutron [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Building network info cache for instance 
{{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 599.480280] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.480772] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 599.481254] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2da988bc-f282-4875-8355-edea78b765d3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.498246] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 599.498469] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 599.499873] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae782f31-91a9-4ee5-9d2b-61d6e0f6a2c3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.507021] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Waiting for the task: (returnval){ [ 599.507021] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c4e3c5-497f-b63a-de93-94b4930b19cc" [ 599.507021] env[61570]: _type = "Task" [ 599.507021] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.518121] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c4e3c5-497f-b63a-de93-94b4930b19cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.529257] env[61570]: DEBUG nova.network.neutron [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.648579] env[61570]: DEBUG nova.network.neutron [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.659655] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Releasing lock "refresh_cache-a95948a6-bad7-4cf0-902f-7c7bb4157d2b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.660325] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 599.660578] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 599.661764] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4f79d6-a959-4380-be44-a6c316d70d48 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.673147] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 599.673147] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6b39463-9b1b-4acc-905d-38e18f469fe1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.705891] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 599.706136] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 599.706399] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Deleting the datastore file [datastore2] a95948a6-bad7-4cf0-902f-7c7bb4157d2b {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
599.706585] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9626f4c5-703a-4c25-808e-27d3cfb13566 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.714044] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Waiting for the task: (returnval){ [ 599.714044] env[61570]: value = "task-4891282" [ 599.714044] env[61570]: _type = "Task" [ 599.714044] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.723544] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Task: {'id': task-4891282, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.019939] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 600.020226] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Creating directory with path [datastore2] vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.020622] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a64cb68-65eb-4bb9-823c-43a29c2f66e5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.034758] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Created directory with path [datastore2] vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.034979] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Fetch image to [datastore2] vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 600.035169] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) 
_fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 600.035979] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1d6b84-4d79-41ed-9586-13bed1ebe4ca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.045381] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3640ea37-a601-41f6-985b-bb4a9dd61d1e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.057143] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f370314e-77d7-43ae-ac5f-8f737b8f650c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.092744] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089e321b-a828-48f9-8a7d-8b742df38c2c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.100176] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-37f41b24-33ab-46f2-a5c1-9bbdb726b348 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.132479] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 600.199481] env[61570]: DEBUG oslo_vmware.rw_handles [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 600.261451] env[61570]: DEBUG oslo_vmware.rw_handles [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 600.261451] env[61570]: DEBUG oslo_vmware.rw_handles [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 600.266399] env[61570]: DEBUG oslo_vmware.api [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Task: {'id': task-4891282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0396} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.266995] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 600.266995] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 600.267196] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 600.267456] env[61570]: INFO nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Took 0.61 seconds to destroy the instance on the hypervisor. [ 600.267710] env[61570]: DEBUG oslo.service.loopingcall [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.268078] env[61570]: DEBUG nova.compute.manager [-] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 600.271015] env[61570]: DEBUG nova.compute.claims [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 600.271233] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.271370] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.722845] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0924b0c3-faf2-4341-83d2-54512ba4ad08 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.734784] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7a78cf-ae24-4cd2-ac45-5cade0bd75ce {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.765645] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a37301d-345e-4ab6-8387-a5bf4fefd99b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.773712] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0aae639-9ee8-417e-af88-0eedbd6c6953 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.788801] env[61570]: DEBUG nova.compute.provider_tree [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.801903] env[61570]: DEBUG nova.scheduler.client.report [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.823097] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.552s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.823663] env[61570]: ERROR nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 600.823663] env[61570]: Faults: ['InvalidArgument'] [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Traceback (most recent call last): [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self.driver.spawn(context, instance, image_meta, [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self._fetch_image_if_missing(context, vi) [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] image_cache(vi, tmp_image_ds_loc) [ 600.823663] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] vm_util.copy_virtual_disk( [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] session._wait_for_task(vmdk_copy_task) [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] return self.wait_for_task(task_ref) [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] return evt.wait() [ 
600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] result = hub.switch() [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] return self.greenlet.switch() [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 600.824076] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] self.f(*self.args, **self.kw) [ 600.824602] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 600.824602] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] raise exceptions.translate_fault(task_info.error) [ 600.824602] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 600.824602] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Faults: ['InvalidArgument'] [ 600.824602] env[61570]: ERROR nova.compute.manager [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] [ 600.824602] env[61570]: DEBUG nova.compute.utils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.829673] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Build of instance a95948a6-bad7-4cf0-902f-7c7bb4157d2b was re-scheduled: A specified parameter was not correct: fileType [ 600.829673] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 600.830182] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 600.830424] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Acquiring lock "refresh_cache-a95948a6-bad7-4cf0-902f-7c7bb4157d2b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.830570] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 
tempest-ServersAdmin275Test-1136515034-project-member] Acquired lock "refresh_cache-a95948a6-bad7-4cf0-902f-7c7bb4157d2b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.830956] env[61570]: DEBUG nova.network.neutron [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.866805] env[61570]: DEBUG nova.network.neutron [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.973875] env[61570]: DEBUG nova.network.neutron [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.995270] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Releasing lock "refresh_cache-a95948a6-bad7-4cf0-902f-7c7bb4157d2b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.995512] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 600.995697] env[61570]: DEBUG nova.compute.manager [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] [instance: a95948a6-bad7-4cf0-902f-7c7bb4157d2b] Skipping network deallocation for instance since networking was not requested. {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 601.151518] env[61570]: INFO nova.scheduler.client.report [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Deleted allocations for instance a95948a6-bad7-4cf0-902f-7c7bb4157d2b [ 601.195710] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e2106a67-ace2-4816-abe4-35426aec7457 tempest-ServersAdmin275Test-1136515034 tempest-ServersAdmin275Test-1136515034-project-member] Lock "a95948a6-bad7-4cf0-902f-7c7bb4157d2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.591s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.255694] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Starting instance... 
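Lock messages of the form "Acquiring lock ... by ...", "Lock ... acquired ... waited N.NNNs" and "Lock ... released ... held N.NNNs" (both the per-instance build lock held 54.591s above and the "compute_resources" claims around it) are emitted by oslo.concurrency's lockutils helpers. A sketch of the resource-tracker style of serialization, with illustrative method bodies:

    from oslo_concurrency import lockutils

    # Nova-style prefix; every decorated method shares the same named semaphore.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    class ResourceTracker(object):

        @synchronized('compute_resources')
        def instance_claim(self, context, instance):
            ...   # test and record the resources the new instance needs

        @synchronized('compute_resources')
        def abort_instance_claim(self, context, instance):
            ...   # return the resources reserved by a failed build

Because claim, abort and the periodic audit all take the same lock, the "waited/held" timings in the log show exactly how long each of them serialized the others.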
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 601.263588] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.263588] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.357228] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.358193] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.359763] env[61570]: INFO nova.compute.claims [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.863211] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6036eb-9640-4a61-bf2a-dad1b6400e4f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.871996] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c459b3a-1c96-4608-a210-ed64372d0d42 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.907869] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa90a1b-6cf4-486d-ac8d-a28a2a627a93 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.916959] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6d8d3a-96e5-4bd6-82bb-877de62aff7c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.937077] env[61570]: DEBUG nova.compute.provider_tree [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.951315] env[61570]: DEBUG nova.scheduler.client.report [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.975555] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.617s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.976039] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 602.028261] env[61570]: DEBUG nova.compute.utils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 602.031618] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 602.031618] env[61570]: DEBUG nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 602.064744] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Start building block device mappings for instance. 
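The inventory dict reported to placement above is easy to sanity-check by hand: for each resource class the allocatable capacity is (total - reserved) * allocation_ratio. Using the exact values from this log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {usable:g} allocatable")
    # VCPU: 192 allocatable, MEMORY_MB: 196078 allocatable, DISK_GB: 200 allocatable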
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 602.149992] env[61570]: DEBUG nova.policy [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9271593e70c042b3aafb22bcaa7e2a62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1820e4eb4e7d4d62938f022b7a1c8fc4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 602.168681] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 602.209668] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.209915] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.210643] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.210643] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.210643] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.210792] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb 
tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.211067] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.211306] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.211479] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.211634] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.211823] env[61570]: DEBUG nova.virt.hardware [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.215023] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfe0c08-8f95-4b30-9a4b-bbccd3a0378f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.221796] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45689f89-b9b6-41a9-b5a5-1151bc4d2091 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.542888] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d5630fa8-d955-41e3-87d0-d690fe6f2afc tempest-ImagesOneServerNegativeTestJSON-2032486270 tempest-ImagesOneServerNegativeTestJSON-2032486270-project-member] Acquiring lock "966ce8b7-1a10-46f7-b113-e191cdc9a6ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.543145] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d5630fa8-d955-41e3-87d0-d690fe6f2afc tempest-ImagesOneServerNegativeTestJSON-2032486270 tempest-ImagesOneServerNegativeTestJSON-2032486270-project-member] Lock "966ce8b7-1a10-46f7-b113-e191cdc9a6ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.764135] env[61570]: DEBUG 
nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Successfully created port: 0960779e-a00a-4199-b6d5-fde434684d4e {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.827107] env[61570]: DEBUG nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Successfully updated port: 0960779e-a00a-4199-b6d5-fde434684d4e {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 603.844532] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "refresh_cache-e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.844532] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquired lock "refresh_cache-e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.844532] env[61570]: DEBUG nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 603.907217] env[61570]: DEBUG nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.210451] env[61570]: DEBUG nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Updating instance_info_cache with network_info: [{"id": "0960779e-a00a-4199-b6d5-fde434684d4e", "address": "fa:16:3e:d3:b2:9c", "network": {"id": "39f87960-4c89-47e6-b8f8-be7f0c7bd048", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-750191235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1820e4eb4e7d4d62938f022b7a1c8fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0960779e-a0", "ovs_interfaceid": "0960779e-a00a-4199-b6d5-fde434684d4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.227857] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Releasing lock "refresh_cache-e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.228220] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Instance network_info: |[{"id": "0960779e-a00a-4199-b6d5-fde434684d4e", "address": "fa:16:3e:d3:b2:9c", "network": {"id": "39f87960-4c89-47e6-b8f8-be7f0c7bd048", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-750191235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1820e4eb4e7d4d62938f022b7a1c8fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0960779e-a0", "ovs_interfaceid": "0960779e-a00a-4199-b6d5-fde434684d4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 604.228932] 
env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:b2:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62d6a386-ffdb-4232-83f3-cb21c5e59e85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0960779e-a00a-4199-b6d5-fde434684d4e', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.238235] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Creating folder: Project (1820e4eb4e7d4d62938f022b7a1c8fc4). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 604.238778] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a315c0c0-d7ea-4437-90ac-da8accd2ceb3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.252290] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Created folder: Project (1820e4eb4e7d4d62938f022b7a1c8fc4) in parent group-v953072. [ 604.252754] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Creating folder: Instances. Parent ref: group-v953107. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 604.252860] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89279ae3-58c6-40cb-a827-321bb3098519 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.263240] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Created folder: Instances in parent group-v953107. [ 604.263497] env[61570]: DEBUG oslo.service.loopingcall [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.263691] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 604.263891] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82ea105c-88f1-417e-8f72-98688c30a760 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.288148] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.288148] env[61570]: value = "task-4891285" [ 604.288148] env[61570]: _type = "Task" [ 604.288148] env[61570]: } to complete. 
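The "Waiting for the task ... to complete" block above is the vSphere task-polling loop: the API call returns a task reference (task-4891285) immediately and the driver polls its state until it succeeds or reports a fault, which is also how the earlier InvalidArgument fault surfaced. A simplified stand-in (session.get_task_info is a hypothetical helper, not the oslo.vmware API):

    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        while True:
            info = session.get_task_info(task_ref)   # hypothetical property read
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # oslo.vmware translates this into a VimFaultException
                raise RuntimeError(info.error)
            time.sleep(poll_interval)                # "progress is 0%" entries land between polls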
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.300725] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891285, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.802973] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891285, 'name': CreateVM_Task, 'duration_secs': 0.353203} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.803184] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 604.805272] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.805471] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.805912] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 604.806198] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8d1cb04-d8b1-4cf5-accf-ced8d008c27a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.813210] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Waiting for the task: (returnval){ [ 604.813210] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5293e192-a41d-05e0-ee31-397ac639fb34" [ 604.813210] env[61570]: _type = "Task" [ 604.813210] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.824054] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5293e192-a41d-05e0-ee31-397ac639fb34, 'name': SearchDatastore_Task} progress is 0%. 
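The lock and semaphore names acquired above encode the datastore location of the cached image that the SearchDatastore_Task is about to look for. A tiny helper that rebuilds such a path from its parts, using the identifiers recorded in this log:

    def ds_path(datastore, *parts):
        # Datastore paths are written as "[<datastore>] <dir>/<subdir>/...".
        return f"[{datastore}] " + "/".join(parts)

    image_id = "64ba497f-0d92-47de-bece-8112101951ad"
    cache_root = "devstack-image-cache_base"

    print(ds_path("datastore2", cache_root, image_id))
    # [datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad
    print(ds_path("datastore2", cache_root, image_id, image_id + ".vmdk"))
    # [datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk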
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.890915] env[61570]: DEBUG nova.compute.manager [req-5f0ab1b7-4bb5-4538-b2f8-c903d58890cf req-f14c9c9c-e435-4d54-8fe1-1671828327f0 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Received event network-vif-plugged-0960779e-a00a-4199-b6d5-fde434684d4e {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 604.891516] env[61570]: DEBUG oslo_concurrency.lockutils [req-5f0ab1b7-4bb5-4538-b2f8-c903d58890cf req-f14c9c9c-e435-4d54-8fe1-1671828327f0 service nova] Acquiring lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.891796] env[61570]: DEBUG oslo_concurrency.lockutils [req-5f0ab1b7-4bb5-4538-b2f8-c903d58890cf req-f14c9c9c-e435-4d54-8fe1-1671828327f0 service nova] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.892029] env[61570]: DEBUG oslo_concurrency.lockutils [req-5f0ab1b7-4bb5-4538-b2f8-c903d58890cf req-f14c9c9c-e435-4d54-8fe1-1671828327f0 service nova] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.892237] env[61570]: DEBUG nova.compute.manager [req-5f0ab1b7-4bb5-4538-b2f8-c903d58890cf req-f14c9c9c-e435-4d54-8fe1-1671828327f0 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] No waiting events found dispatching network-vif-plugged-0960779e-a00a-4199-b6d5-fde434684d4e {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 604.892446] env[61570]: WARNING nova.compute.manager [req-5f0ab1b7-4bb5-4538-b2f8-c903d58890cf req-f14c9c9c-e435-4d54-8fe1-1671828327f0 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Received unexpected event network-vif-plugged-0960779e-a00a-4199-b6d5-fde434684d4e for instance with vm_state building and task_state spawning. 
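The WARNING about an unexpected event above follows from the external-event handshake: the compute manager only waits for events it registered in advance, and network-vif-plugged-0960779e... arrived before the spawn path registered interest, so there was nothing to dispatch it to. A simplified, threading-based stand-in for that pop/dispatch mechanism (Nova's real implementation is eventlet-based and keyed per instance):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}            # (instance_uuid, event_name) -> Event

        def prepare(self, uuid, name):
            ev = threading.Event()
            self._events[(uuid, name)] = ev
            return ev                    # the spawn path waits on this

        def pop(self, uuid, name):
            return self._events.pop((uuid, name), None)

    events = InstanceEvents()

    def handle_external_event(uuid, name):
        ev = events.pop(uuid, name)
        if ev is None:
            print(f"Received unexpected event {name} for instance {uuid}")
        else:
            ev.set()                     # wake up the waiter in the spawn path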
[ 605.326528] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.327263] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.328403] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.459774] env[61570]: DEBUG nova.compute.manager [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Received event network-changed-0960779e-a00a-4199-b6d5-fde434684d4e {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 608.459774] env[61570]: DEBUG nova.compute.manager [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Refreshing instance network info cache due to event network-changed-0960779e-a00a-4199-b6d5-fde434684d4e. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 608.459774] env[61570]: DEBUG oslo_concurrency.lockutils [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] Acquiring lock "refresh_cache-e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.459774] env[61570]: DEBUG oslo_concurrency.lockutils [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] Acquired lock "refresh_cache-e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.460400] env[61570]: DEBUG nova.network.neutron [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Refreshing network info cache for port 0960779e-a00a-4199-b6d5-fde434684d4e {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 608.824117] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2d949050-c65c-47e3-a7e4-9ed039be8712 tempest-ServerAddressesNegativeTestJSON-289184649 tempest-ServerAddressesNegativeTestJSON-289184649-project-member] Acquiring lock "82b9c976-583a-46f1-b412-87dd225dba12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.824117] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2d949050-c65c-47e3-a7e4-9ed039be8712 tempest-ServerAddressesNegativeTestJSON-289184649 tempest-ServerAddressesNegativeTestJSON-289184649-project-member] Lock "82b9c976-583a-46f1-b412-87dd225dba12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.029849] env[61570]: DEBUG nova.network.neutron [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Updated VIF entry in instance network info cache for port 0960779e-a00a-4199-b6d5-fde434684d4e. 
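The instance_info_cache payloads logged for this port are lists of VIF dicts. A small example that pulls out the fields the rest of the log keys on (port id, MAC address, fixed IPs), using the values recorded for port 0960779e-a00a-4199-b6d5-fde434684d4e:

    network_info = [{
        "id": "0960779e-a00a-4199-b6d5-fde434684d4e",
        "address": "fa:16:3e:d3:b2:9c",
        "network": {
            "id": "39f87960-4c89-47e6-b8f8-be7f0c7bd048",
            "bridge": "br-int",
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.8", "type": "fixed"}]}],
        },
        "devname": "tap0960779e-a0",
    }]

    def summarize(vif):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        return vif["id"], vif["address"], ips

    print(summarize(network_info[0]))
    # ('0960779e-a00a-4199-b6d5-fde434684d4e', 'fa:16:3e:d3:b2:9c', ['192.168.128.8'])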
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 609.029849] env[61570]: DEBUG nova.network.neutron [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Updating instance_info_cache with network_info: [{"id": "0960779e-a00a-4199-b6d5-fde434684d4e", "address": "fa:16:3e:d3:b2:9c", "network": {"id": "39f87960-4c89-47e6-b8f8-be7f0c7bd048", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-750191235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1820e4eb4e7d4d62938f022b7a1c8fc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0960779e-a0", "ovs_interfaceid": "0960779e-a00a-4199-b6d5-fde434684d4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.039604] env[61570]: DEBUG oslo_concurrency.lockutils [req-47048ae7-cd34-441b-b493-6d4417d0a876 req-7bb5d1a7-753c-4481-bcbc-f6a5fa161a19 service nova] Releasing lock "refresh_cache-e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.221470] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a4f923b0-7957-4d7f-a4b2-819fa77dbac5 tempest-ServerRescueTestJSON-316296412 tempest-ServerRescueTestJSON-316296412-project-member] Acquiring lock "fb6a424d-c4b0-4913-a4ef-aa361ff25101" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.221470] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a4f923b0-7957-4d7f-a4b2-819fa77dbac5 tempest-ServerRescueTestJSON-316296412 tempest-ServerRescueTestJSON-316296412-project-member] Lock "fb6a424d-c4b0-4913-a4ef-aa361ff25101" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.314824] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.347836] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.347836] env[61570]: DEBUG nova.compute.manager [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 615.347836] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 615.371322] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371322] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371322] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371322] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371322] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371973] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371973] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371973] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371973] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.371973] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. 
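The periodic-task entries above (_sync_scheduler_instance_info, _heal_instance_info_cache, and _reclaim_queued_deletes just below) are driven by oslo.service's periodic_task machinery: decorated methods on the manager are collected and executed by run_periodic_tasks. A minimal sketch with illustrative spacing values, not Nova's configuration:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ComputeManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # The log shows instances still in the Building state being skipped here.
            pass

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            pass

    manager = ComputeManager()
    # In the real service this is invoked repeatedly by the periodic-task loop.
    manager.run_periodic_tasks(context=None)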
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 615.372167] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 615.372431] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.372678] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 615.753606] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.753849] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.771031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.771031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.771031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.771031] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 615.771512] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5a1472-9268-4dbb-a83c-1c960bf5dbd6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.782907] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5581370a-ee16-4937-ade5-1b35891b7603 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.805659] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ad59c9-3bca-4703-bd16-b81e741fd4db 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.814723] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c57e17c-3573-4eda-b7e1-e322823c8d93 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.847850] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180567MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 615.848045] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.848603] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.929785] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.929997] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8926a7b1-989f-4290-8828-1e75efbc0553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.930182] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 766812ee-e272-4779-b85d-7fdde876e877 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.930308] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20b016b6-a53d-4b1f-a7c7-539d1636091b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.930427] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 48b87f3a-879c-4578-90cf-3e4328299e81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.930543] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.930659] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.930774] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.930885] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.934022] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 615.944069] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.973886] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 615.988570] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.003198] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 85acb509-fb8a-4f23-90a9-de4fb12fd5dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.022737] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance cec56dfe-ec77-4824-8751-43f85b57c6d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.033926] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 771b52c2-234d-47ad-af34-11cf0d68f5e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.047112] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a39a01c-4e3f-4031-98a2-2a12c492a2ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.058749] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.072858] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7a5f2f6a-db7f-410d-96cf-376be4ef6dc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.084999] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 63eeeab2-6aa3-49c9-b76b-09cc81f8d269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.098041] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8962f2e6-007f-47be-8c56-bb33c8354287 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.108207] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9f25c9d9-4936-4773-8fc3-bf52648752be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.123118] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.134746] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 243873bb-c6d0-4212-8bc6-5512044b9025 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.149087] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.162572] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 966ce8b7-1a10-46f7-b113-e191cdc9a6ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.174600] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 82b9c976-583a-46f1-b412-87dd225dba12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.187366] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fb6a424d-c4b0-4913-a4ef-aa361ff25101 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 616.187616] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 616.187783] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '1', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_2550f62320f14e59961625dff376b2ea': '1', 'io_workload': '10', 'num_proj_b65c16386e5e420d9e1ca6fc7aa13b49': '1', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_075f06a8aabb4d1d8ec26c7b3f341791': '1', 'num_proj_a90ae0f6cf7e45b9a7408bdd6317387e': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_5beaa7046a57489b8b0ca03a79344d08': '1', 'num_proj_779314e2630246b98c8b6a11c3f71890': '1', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 616.628019] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dda7c8-6666-45e6-ab51-caa1c8c52c3e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.635971] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403e9bef-231b-4225-a132-f17257ce5d10 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.677682] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ceae8a-a65c-44d2-bfa8-43f1d6a42f91 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.686897] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f42b1e-f008-43f4-872e-d30eaa1c33d1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.701286] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.744964] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.774408] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 616.774614] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.926s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.037671] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6a9413ab-bb7b-40ed-86d3-479f5af3d24b tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "497d6954-60e5-4a83-932c-c95de38b6f7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.038267] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6a9413ab-bb7b-40ed-86d3-479f5af3d24b tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "497d6954-60e5-4a83-932c-c95de38b6f7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.769180] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.769493] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.769583] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.769729] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.769887] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.270899] env[61570]: DEBUG 
oslo_concurrency.lockutils [None req-a754c5e3-59a8-422c-b60b-265993097e13 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Acquiring lock "abf366b6-380a-4108-b351-8a2f2cceb018" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.271382] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a754c5e3-59a8-422c-b60b-265993097e13 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Lock "abf366b6-380a-4108-b351-8a2f2cceb018" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.525718] env[61570]: WARNING oslo_vmware.rw_handles [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 648.525718] env[61570]: ERROR oslo_vmware.rw_handles [ 648.526355] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 648.527649] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 648.527906] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 
tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Copying Virtual Disk [datastore2] vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/92833f14-1ede-42c1-a7da-5337a4355c67/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 648.528356] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3408ab3b-4630-4dfd-a7b2-e7e5337c64a9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.537106] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Waiting for the task: (returnval){ [ 648.537106] env[61570]: value = "task-4891286" [ 648.537106] env[61570]: _type = "Task" [ 648.537106] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.547558] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Task: {'id': task-4891286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.048018] env[61570]: DEBUG oslo_vmware.exceptions [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 649.048532] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.049260] env[61570]: ERROR nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 649.049260] env[61570]: Faults: ['InvalidArgument'] [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Traceback (most recent call last): [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] yield resources [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self.driver.spawn(context, instance, image_meta, [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self._fetch_image_if_missing(context, vi) [ 649.049260] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] image_cache(vi, tmp_image_ds_loc) [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] vm_util.copy_virtual_disk( [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] session._wait_for_task(vmdk_copy_task) [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] return self.wait_for_task(task_ref) [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] return evt.wait() [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] result = hub.switch() [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 649.050026] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] return self.greenlet.switch() [ 649.050854] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 649.050854] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self.f(*self.args, **self.kw) [ 649.050854] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 649.050854] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] raise exceptions.translate_fault(task_info.error) [ 649.050854] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 649.050854] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Faults: ['InvalidArgument'] [ 649.050854] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] [ 649.050854] env[61570]: INFO nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Terminating instance [ 649.052098] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.052473] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.053467] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 
tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 649.053835] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 649.054250] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8e02b46-1fcb-412d-93c6-1b35e2c0c510 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.057681] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afbe44a-62fc-49ca-8386-56fc991865e9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.065602] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 649.066482] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61b708d6-c4d9-463b-9919-c24d6ce47259 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.069874] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.070136] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 649.071275] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54864e03-984d-47df-a15d-f87a437a9d50 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.078339] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 649.078339] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52d567c4-e701-1ce6-795d-01d7f258cd04" [ 649.078339] env[61570]: _type = "Task" [ 649.078339] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.094621] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 649.094938] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating directory with path [datastore2] vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.095250] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84a3943e-59ad-4c17-b367-fe128ad1912d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.117967] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created directory with path [datastore2] vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.118273] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Fetch image to [datastore2] vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 649.118470] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 649.119308] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86f767a-69d5-4d0e-a600-8e2cb8103437 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.128421] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb70cf6-fb47-49e9-8f63-f68e01eeb7fd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.139327] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5f9485-a193-4e79-88e8-2325b00174aa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.145458] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 
tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 649.145686] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 649.145860] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Deleting the datastore file [datastore2] 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 649.146142] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c68e38cc-a9eb-467b-8022-9d4f78316d0d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.176134] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98051110-f8ea-4600-9b2c-ed37d2a0cdc5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.179115] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Waiting for the task: (returnval){ [ 649.179115] env[61570]: value = "task-4891288" [ 649.179115] env[61570]: _type = "Task" [ 649.179115] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.184685] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dd0d77d0-ea4c-4c27-a418-b245678f84bb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.189114] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Task: {'id': task-4891288, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.282691] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 649.341803] env[61570]: DEBUG oslo_vmware.rw_handles [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 649.402111] env[61570]: DEBUG oslo_vmware.rw_handles [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 649.402322] env[61570]: DEBUG oslo_vmware.rw_handles [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 649.690032] env[61570]: DEBUG oslo_vmware.api [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Task: {'id': task-4891288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188437} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.690032] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 649.690032] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 649.690032] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 649.690032] env[61570]: INFO nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Took 0.64 seconds to destroy the instance on the hypervisor. [ 649.693440] env[61570]: DEBUG nova.compute.claims [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 649.693673] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.694467] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.114090] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1634329a-20ee-4ae9-8053-2c90ed761fa6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.122165] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38401f24-2fa7-474f-a098-b35e5ef10575 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.152611] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1028a770-72f4-4484-b301-fbaec8a6be50 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
650.160533] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7d62fe-9b9c-4e68-bc11-3214591a40d5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.173813] env[61570]: DEBUG nova.compute.provider_tree [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.183219] env[61570]: DEBUG nova.scheduler.client.report [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 650.205269] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.511s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.205882] env[61570]: ERROR nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 650.205882] env[61570]: Faults: ['InvalidArgument'] [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Traceback (most recent call last): [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self.driver.spawn(context, instance, image_meta, [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self._fetch_image_if_missing(context, vi) [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] image_cache(vi, tmp_image_ds_loc) [ 650.205882] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] vm_util.copy_virtual_disk( [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] session._wait_for_task(vmdk_copy_task) [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] return self.wait_for_task(task_ref) [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] return evt.wait() [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] result = hub.switch() [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] return self.greenlet.switch() [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 650.206295] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] self.f(*self.args, **self.kw) [ 650.206656] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 650.206656] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] raise exceptions.translate_fault(task_info.error) [ 650.206656] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 650.206656] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Faults: ['InvalidArgument'] [ 650.206656] env[61570]: ERROR nova.compute.manager [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] [ 650.206656] env[61570]: DEBUG nova.compute.utils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 
24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 650.208574] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Build of instance 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c was re-scheduled: A specified parameter was not correct: fileType [ 650.208574] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 650.209012] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 650.209198] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 650.209363] env[61570]: DEBUG nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 650.209526] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.842315] env[61570]: DEBUG nova.network.neutron [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.858756] env[61570]: INFO nova.compute.manager [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] [instance: 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c] Took 0.65 seconds to deallocate network for instance. 
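[editor's note] The failure traced above follows a single pattern: Nova submits CopyVirtualDisk_Task, oslo.vmware polls it ("progress is 0%"), the task comes back with the fault "A specified parameter was not correct: fileType", and the compute manager terminates the instance, aborts its resource claim, and reschedules the build. The following is a minimal, self-contained Python sketch of that poll-until-complete-then-raise pattern. It is an illustration only; FakeTask, wait_for_task and VimFault are invented names for this sketch and are not the real oslo.vmware or Nova APIs.

    # Hypothetical illustration only: FakeTask, wait_for_task and VimFault are
    # invented for this sketch and are not oslo.vmware or Nova identifiers.
    import time


    class VimFault(Exception):
        """Stand-in for a vSphere task fault such as InvalidArgument: fileType."""


    class FakeTask:
        """Simulates a vCenter task that fails after one 'running' poll."""

        def __init__(self):
            self._polls = 0

        def info(self):
            self._polls += 1
            if self._polls < 2:
                return {"state": "running", "progress": 0, "error": None}
            return {"state": "error", "progress": 0,
                    "error": "A specified parameter was not correct: fileType"}


    def wait_for_task(task, interval=0.1):
        """Poll the task until it finishes; raise if it reports an error.

        Mirrors the behaviour visible in the trace: repeated "progress is 0%"
        polls, then a raised fault that the compute manager turns into
        "Instance failed to spawn" followed by a reschedule.
        """
        while True:
            info = task.info()
            if info["state"] == "running":
                time.sleep(interval)
                continue
            if info["error"]:
                raise VimFault(info["error"])
            return info


    if __name__ == "__main__":
        try:
            wait_for_task(FakeTask())
        except VimFault as fault:
            # In the log, this is the point where the claim is aborted and
            # the build is rescheduled to another host.
            print(f"task failed, aborting claim and rescheduling: {fault}")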
[ 650.977173] env[61570]: INFO nova.scheduler.client.report [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Deleted allocations for instance 24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c [ 650.998699] env[61570]: DEBUG oslo_concurrency.lockutils [None req-33e23eb5-28be-4151-b6f3-3b5895bbbeae tempest-ServersAdminNegativeTestJSON-1937359102 tempest-ServersAdminNegativeTestJSON-1937359102-project-member] Lock "24e79ca7-fe0e-4b3e-8db9-c2b26ca58b7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.914s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.042844] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 651.113422] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.113422] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.114914] env[61570]: INFO nova.compute.claims [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 651.561695] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cd6aa2-8dee-4d64-a348-7107fde48c26 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.570008] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e04d1a-a91d-46b5-b2e3-2f17d8f26732 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.599979] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f507d685-8a3a-4963-9ae9-e3ae9a2a873c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.609591] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d050e4-af99-4227-990f-c69d199a8832 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.624557] env[61570]: DEBUG nova.compute.provider_tree [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 
tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.635188] env[61570]: DEBUG nova.scheduler.client.report [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.649017] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.535s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.649017] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 651.682397] env[61570]: DEBUG nova.compute.utils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 651.684054] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 651.684237] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 651.697800] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Start building block device mappings for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 651.742795] env[61570]: DEBUG nova.policy [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3504092e3c6a4b0f89d37be9d13474f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f90aebcb3272478fa4a680a56504d1b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 651.765243] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 651.797922] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 651.798216] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 651.798383] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.798568] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 651.798741] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.798898] env[61570]: DEBUG nova.virt.hardware [None 
req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 651.799120] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 651.799278] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 651.799515] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 651.799698] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 651.799904] env[61570]: DEBUG nova.virt.hardware [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.801109] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82209c3-b3fc-435c-8f03-7c3ff8faa122 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.809575] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd14c63-8e2c-42f0-a5d7-f4eb731d22fe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.162026] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Successfully created port: 99768a19-1efe-4a0a-9505-54a8f17f7944 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.172498] env[61570]: DEBUG nova.compute.manager [req-af07881a-1e02-476d-8114-d1abddb4a935 req-1839c676-9696-47b4-bf2c-0cc105b85d60 service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Received event network-vif-plugged-99768a19-1efe-4a0a-9505-54a8f17f7944 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 653.172792] env[61570]: DEBUG oslo_concurrency.lockutils [req-af07881a-1e02-476d-8114-d1abddb4a935 req-1839c676-9696-47b4-bf2c-0cc105b85d60 service nova] Acquiring lock "aa2e5125-24fb-4476-a585-df838c8cf4d2-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.172936] env[61570]: DEBUG oslo_concurrency.lockutils [req-af07881a-1e02-476d-8114-d1abddb4a935 req-1839c676-9696-47b4-bf2c-0cc105b85d60 service nova] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.173117] env[61570]: DEBUG oslo_concurrency.lockutils [req-af07881a-1e02-476d-8114-d1abddb4a935 req-1839c676-9696-47b4-bf2c-0cc105b85d60 service nova] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.173276] env[61570]: DEBUG nova.compute.manager [req-af07881a-1e02-476d-8114-d1abddb4a935 req-1839c676-9696-47b4-bf2c-0cc105b85d60 service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] No waiting events found dispatching network-vif-plugged-99768a19-1efe-4a0a-9505-54a8f17f7944 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 653.173432] env[61570]: WARNING nova.compute.manager [req-af07881a-1e02-476d-8114-d1abddb4a935 req-1839c676-9696-47b4-bf2c-0cc105b85d60 service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Received unexpected event network-vif-plugged-99768a19-1efe-4a0a-9505-54a8f17f7944 for instance with vm_state building and task_state spawning. [ 653.207559] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Successfully updated port: 99768a19-1efe-4a0a-9505-54a8f17f7944 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.238129] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "refresh_cache-aa2e5125-24fb-4476-a585-df838c8cf4d2" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.238129] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired lock "refresh_cache-aa2e5125-24fb-4476-a585-df838c8cf4d2" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.238129] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 653.320613] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 653.627115] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Updating instance_info_cache with network_info: [{"id": "99768a19-1efe-4a0a-9505-54a8f17f7944", "address": "fa:16:3e:aa:c0:2c", "network": {"id": "a76c487f-68f6-4384-9312-c08ed0a709d9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-113450026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90aebcb3272478fa4a680a56504d1b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99768a19-1e", "ovs_interfaceid": "99768a19-1efe-4a0a-9505-54a8f17f7944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.638871] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Releasing lock "refresh_cache-aa2e5125-24fb-4476-a585-df838c8cf4d2" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.641846] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Instance network_info: |[{"id": "99768a19-1efe-4a0a-9505-54a8f17f7944", "address": "fa:16:3e:aa:c0:2c", "network": {"id": "a76c487f-68f6-4384-9312-c08ed0a709d9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-113450026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90aebcb3272478fa4a680a56504d1b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99768a19-1e", "ovs_interfaceid": "99768a19-1efe-4a0a-9505-54a8f17f7944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 653.641972] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:c0:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74e6f6e0-95e6-4531-99e9-0e78350fb655', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99768a19-1efe-4a0a-9505-54a8f17f7944', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.656653] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating folder: Project (f90aebcb3272478fa4a680a56504d1b7). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 653.657368] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4af75e4d-af36-4ebf-a57e-3c1cd84b8aba {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.669938] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Created folder: Project (f90aebcb3272478fa4a680a56504d1b7) in parent group-v953072. [ 653.670172] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating folder: Instances. Parent ref: group-v953110. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 653.670843] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec895160-4cc4-48f6-a77f-477d54f10c2e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.681097] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Created folder: Instances in parent group-v953110. [ 653.681374] env[61570]: DEBUG oslo.service.loopingcall [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.681588] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 653.681808] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d376065-5a1e-4298-a7ad-503db4b3c5a2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.701494] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.701494] env[61570]: value = "task-4891291" [ 653.701494] env[61570]: _type = "Task" [ 653.701494] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.710448] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891291, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.217429] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891291, 'name': CreateVM_Task, 'duration_secs': 0.307541} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.217919] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 654.218361] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.218606] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.219037] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 654.219123] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08ac38bb-de7b-4095-8df2-c0f861fd8762 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.225135] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 654.225135] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c1ec78-a957-756b-e72a-b4caa14ad5fc" [ 654.225135] env[61570]: _type = "Task" [ 654.225135] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.238192] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c1ec78-a957-756b-e72a-b4caa14ad5fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.735952] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.736239] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.736497] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.491260] env[61570]: DEBUG nova.compute.manager [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Received event network-changed-99768a19-1efe-4a0a-9505-54a8f17f7944 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 655.491583] env[61570]: DEBUG nova.compute.manager [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Refreshing instance network info cache due to event network-changed-99768a19-1efe-4a0a-9505-54a8f17f7944. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 655.491696] env[61570]: DEBUG oslo_concurrency.lockutils [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] Acquiring lock "refresh_cache-aa2e5125-24fb-4476-a585-df838c8cf4d2" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.491834] env[61570]: DEBUG oslo_concurrency.lockutils [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] Acquired lock "refresh_cache-aa2e5125-24fb-4476-a585-df838c8cf4d2" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.492054] env[61570]: DEBUG nova.network.neutron [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Refreshing network info cache for port 99768a19-1efe-4a0a-9505-54a8f17f7944 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 655.894811] env[61570]: DEBUG nova.network.neutron [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Updated VIF entry in instance network info cache for port 99768a19-1efe-4a0a-9505-54a8f17f7944. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 655.895202] env[61570]: DEBUG nova.network.neutron [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Updating instance_info_cache with network_info: [{"id": "99768a19-1efe-4a0a-9505-54a8f17f7944", "address": "fa:16:3e:aa:c0:2c", "network": {"id": "a76c487f-68f6-4384-9312-c08ed0a709d9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-113450026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90aebcb3272478fa4a680a56504d1b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99768a19-1e", "ovs_interfaceid": "99768a19-1efe-4a0a-9505-54a8f17f7944", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.905585] env[61570]: DEBUG oslo_concurrency.lockutils [req-9d2bb0a7-f336-4801-b78c-0fc7c0703e93 req-15f10698-e928-4a70-b117-7dfbff7fb3bf service nova] Releasing lock "refresh_cache-aa2e5125-24fb-4476-a585-df838c8cf4d2" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.881188] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.881462] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.754278] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.754525] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 674.754696] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of 
instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 674.780149] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.780357] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.780460] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.780654] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.780793] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.780913] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.781047] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.781170] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.781290] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.781410] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 674.781621] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 676.752887] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.753165] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.753319] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 676.753471] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.765096] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.765320] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.765485] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.765695] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 676.766750] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90810f0-96d4-497b-a5ca-e7028b95e8f2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.775932] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ef6279-0292-4c9b-a00f-a61597729a1c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.790579] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd3b5f7-3495-4f58-8f15-5d064530b8ca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.797571] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-56c6d464-e95a-4a0f-8f9f-36dba5776642 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.827431] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180603MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 676.827600] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.827797] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.905198] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8926a7b1-989f-4290-8828-1e75efbc0553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.905367] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 766812ee-e272-4779-b85d-7fdde876e877 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.905495] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20b016b6-a53d-4b1f-a7c7-539d1636091b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.905616] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 48b87f3a-879c-4578-90cf-3e4328299e81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.905740] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.905887] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.906017] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.906133] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.906253] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.906359] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 676.918152] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 676.930312] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 676.940601] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 85acb509-fb8a-4f23-90a9-de4fb12fd5dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 676.953620] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance cec56dfe-ec77-4824-8751-43f85b57c6d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 676.964118] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 771b52c2-234d-47ad-af34-11cf0d68f5e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 676.974048] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a39a01c-4e3f-4031-98a2-2a12c492a2ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 676.989223] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 676.998085] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7a5f2f6a-db7f-410d-96cf-376be4ef6dc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.009705] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 63eeeab2-6aa3-49c9-b76b-09cc81f8d269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.024219] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8962f2e6-007f-47be-8c56-bb33c8354287 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.034650] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9f25c9d9-4936-4773-8fc3-bf52648752be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.046603] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.057479] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 243873bb-c6d0-4212-8bc6-5512044b9025 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.067798] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.078647] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 966ce8b7-1a10-46f7-b113-e191cdc9a6ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.090210] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 82b9c976-583a-46f1-b412-87dd225dba12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.099995] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fb6a424d-c4b0-4913-a4ef-aa361ff25101 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.109743] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 497d6954-60e5-4a83-932c-c95de38b6f7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.119795] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance abf366b6-380a-4108-b351-8a2f2cceb018 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.130649] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 677.130963] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 677.131392] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '2', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_b65c16386e5e420d9e1ca6fc7aa13b49': '1', 'io_workload': '10', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_075f06a8aabb4d1d8ec26c7b3f341791': '1', 'num_proj_a90ae0f6cf7e45b9a7408bdd6317387e': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_5beaa7046a57489b8b0ca03a79344d08': '1', 'num_proj_779314e2630246b98c8b6a11c3f71890': '1', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 677.469374] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35e96c9-850d-4118-9df9-eacf468321c2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.477775] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecbba29-6c4f-492e-a0ad-8e74d07c82ba {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.509029] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f003605-de6a-4e93-8bf6-5f68d7e6e5c7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.518060] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7c293f-0624-4cf4-96b4-cefac1be860f {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.530872] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.539454] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 677.560523] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 677.560762] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.733s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.561219] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 678.561505] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 678.748447] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 678.753291] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 678.753552] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 695.358095] env[61570]: WARNING oslo_vmware.rw_handles [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 695.358095] env[61570]: ERROR 
oslo_vmware.rw_handles Traceback (most recent call last): [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 695.358095] env[61570]: ERROR oslo_vmware.rw_handles [ 695.358621] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 695.360234] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 695.360498] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Copying Virtual Disk [datastore2] vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/15306ddf-a554-490a-88df-4c7de2877cbb/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 695.360811] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8eb89695-b12b-425e-b4d1-f46ac6489090 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.371054] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 695.371054] env[61570]: value = "task-4891292" [ 695.371054] env[61570]: _type = "Task" [ 695.371054] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.379676] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.882122] env[61570]: DEBUG oslo_vmware.exceptions [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 695.882122] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.882358] env[61570]: ERROR nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 695.882358] env[61570]: Faults: ['InvalidArgument'] [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Traceback (most recent call last): [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] yield resources [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self.driver.spawn(context, instance, image_meta, [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self._fetch_image_if_missing(context, vi) [ 695.882358] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] image_cache(vi, tmp_image_ds_loc) [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 
20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] vm_util.copy_virtual_disk( [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] session._wait_for_task(vmdk_copy_task) [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] return self.wait_for_task(task_ref) [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] return evt.wait() [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] result = hub.switch() [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 695.882729] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] return self.greenlet.switch() [ 695.883089] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 695.883089] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self.f(*self.args, **self.kw) [ 695.883089] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 695.883089] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] raise exceptions.translate_fault(task_info.error) [ 695.883089] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 695.883089] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Faults: ['InvalidArgument'] [ 695.883089] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] [ 695.883089] env[61570]: INFO nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Terminating instance [ 695.884263] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.884467] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.885085] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 695.885277] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 695.885502] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-868bf5be-0846-485f-b716-a12eb52cca1b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.887790] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787e0833-e9dd-4c60-b563-186920d874ef {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.895128] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 695.895356] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff48dd7b-2894-413b-a15e-0ce0b07eba02 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.897648] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.897823] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 695.898796] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde35592-6761-40e7-84ea-9611ce9c540b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.903954] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Waiting for the task: (returnval){ [ 695.903954] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52cca551-817c-4c8c-ecaf-51fb97c3b7a2" [ 695.903954] env[61570]: _type = "Task" [ 695.903954] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.911855] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52cca551-817c-4c8c-ecaf-51fb97c3b7a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.975800] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 695.976021] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 695.976213] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleting the datastore file [datastore2] 20b016b6-a53d-4b1f-a7c7-539d1636091b {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.976473] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3c4bae4-e8d7-4246-acbb-87f8cd6f3958 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.983487] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 695.983487] env[61570]: value = "task-4891294" [ 695.983487] env[61570]: _type = "Task" [ 695.983487] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.992352] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891294, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.414777] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 696.415102] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Creating directory with path [datastore2] vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 696.415377] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10f0c0d2-3564-454c-9e1b-3222774c00db {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.429744] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Created directory with path [datastore2] vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 696.430024] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Fetch image to [datastore2] vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 696.430247] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 696.431186] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bce1d48-326c-45d5-a32e-789f54f418a7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.439151] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002e730b-ba75-4d21-a24d-3ff072dcb415 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.448485] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d4a8ef-a5a5-4c46-a908-b4a275796b0c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.480078] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0fb8e080-2b09-4c00-b375-a9f512b8f6ac {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.489409] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6155aaea-663a-476e-a4a5-11441aa618d2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.493852] env[61570]: DEBUG oslo_vmware.api [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891294, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080606} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.494425] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 696.494658] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 696.494869] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 696.495101] env[61570]: INFO nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Took 0.61 seconds to destroy the instance on the hypervisor. 
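Annotation: the CopyVirtualDisk_Task failure recorded above surfaces through the task-polling pattern visible in the "progress is 0%" and "completed successfully" lines: the caller repeatedly reads task info and a terminal error state is translated into a VimFaultException (here "A specified parameter was not correct: fileType", Faults: ['InvalidArgument']). The following is a minimal, self-contained sketch of that poll-until-done pattern under stated assumptions; TaskInfo, VimFault and wait_for_task below are illustrative stand-ins written for this note, not the oslo.vmware implementation.

    # Illustrative sketch only: a simplified poll-until-done loop in the spirit of
    # the wait_for_task / _poll_task records in the log. The TaskInfo class and the
    # VimFault exception are made up for this example; they are not oslo.vmware APIs.
    import time
    from dataclasses import dataclass, field

    @dataclass
    class TaskInfo:
        state: str                      # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None
        fault_list: list = field(default_factory=list)

    class VimFault(Exception):
        """Stand-in for a translated vSphere task fault."""
        def __init__(self, message, faults):
            super().__init__(message)
            self.fault_list = faults

    def wait_for_task(read_task_info, interval=0.5):
        """Poll task info until a terminal state; raise on an error state."""
        while True:
            info = read_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Where a fault like 'A specified parameter was not correct:
                # fileType' with Faults: ['InvalidArgument'] would be raised.
                raise VimFault(info.error, info.fault_list)
            time.sleep(interval)

    # Example: a task that ends the way task-4891292 did above.
    states = iter([
        TaskInfo(state='running', progress=0),
        TaskInfo(state='error',
                 error='A specified parameter was not correct: fileType',
                 fault_list=['InvalidArgument']),
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFault as exc:
        print(exc, exc.fault_list)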
[ 696.497174] env[61570]: DEBUG nova.compute.claims [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 696.497346] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.497554] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.515648] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 696.569380] env[61570]: DEBUG oslo_vmware.rw_handles [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 696.631996] env[61570]: DEBUG oslo_vmware.rw_handles [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 696.632064] env[61570]: DEBUG oslo_vmware.rw_handles [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 696.977488] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62e9e1c-bcdb-42f2-bed2-94cb79629a35 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.985763] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0583768d-9115-4be6-a4b1-8ffb6755a1be {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.016165] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea72d01-6fd9-4f56-a0d7-20cbeca69671 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.023715] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff1bedd-0818-40dc-b51a-e7314f965a21 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.037083] env[61570]: DEBUG nova.compute.provider_tree [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.046294] env[61570]: DEBUG nova.scheduler.client.report [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.063464] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.566s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.064016] env[61570]: ERROR nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 697.064016] env[61570]: Faults: ['InvalidArgument'] [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Traceback (most recent call last): [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 697.064016] env[61570]: ERROR 
nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self.driver.spawn(context, instance, image_meta, [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self._fetch_image_if_missing(context, vi) [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] image_cache(vi, tmp_image_ds_loc) [ 697.064016] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] vm_util.copy_virtual_disk( [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] session._wait_for_task(vmdk_copy_task) [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] return self.wait_for_task(task_ref) [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] return evt.wait() [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] result = hub.switch() [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] return self.greenlet.switch() [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 697.064417] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] self.f(*self.args, **self.kw) [ 697.064741] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 697.064741] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] raise exceptions.translate_fault(task_info.error) [ 697.064741] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 697.064741] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Faults: ['InvalidArgument'] [ 697.064741] env[61570]: ERROR nova.compute.manager [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] [ 697.064741] env[61570]: DEBUG nova.compute.utils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.066150] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Build of instance 20b016b6-a53d-4b1f-a7c7-539d1636091b was re-scheduled: A specified parameter was not correct: fileType [ 697.066150] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 697.066530] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 697.066700] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 697.066870] env[61570]: DEBUG nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 697.067045] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 697.479997] env[61570]: DEBUG nova.network.neutron [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.489956] env[61570]: INFO nova.compute.manager [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 20b016b6-a53d-4b1f-a7c7-539d1636091b] Took 0.42 seconds to deallocate network for instance. [ 697.609922] env[61570]: INFO nova.scheduler.client.report [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleted allocations for instance 20b016b6-a53d-4b1f-a7c7-539d1636091b [ 697.633025] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4b14803-a8ff-499e-b821-6c54d0791a31 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "20b016b6-a53d-4b1f-a7c7-539d1636091b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.707s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.651274] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 697.722162] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.722620] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.724251] env[61570]: INFO nova.compute.claims [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.146044] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f712eb2-ca7e-4bd0-8147-846b391d0bea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.154349] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30861053-343a-4695-b699-ac077ff5718a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.185857] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b913c9-1aaf-46dd-97df-de0dc819ba16 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.194229] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd9a57d-228c-4549-a112-564b6596ceff {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.208615] env[61570]: DEBUG nova.compute.provider_tree [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.221808] env[61570]: DEBUG nova.scheduler.client.report [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 698.238517] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 
tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.516s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.239076] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 698.284039] env[61570]: DEBUG nova.compute.utils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.285030] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 698.285265] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 698.294332] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 698.356487] env[61570]: DEBUG nova.policy [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3504092e3c6a4b0f89d37be9d13474f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f90aebcb3272478fa4a680a56504d1b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 698.378026] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 698.405374] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 698.405605] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 698.405760] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 698.405939] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 698.406097] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 698.406245] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 698.406448] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 698.406656] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 698.406758] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 
tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 698.406920] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 698.407107] env[61570]: DEBUG nova.virt.hardware [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 698.407981] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8af2a8e-5786-47e2-bcac-998d31f56ebc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.416990] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e0c4b6-7934-419f-b000-9fb3cd084132 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.827715] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Successfully created port: 853a7f2f-2d24-4fc1-80c8-b18b634d943c {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 699.780114] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Successfully updated port: 853a7f2f-2d24-4fc1-80c8-b18b634d943c {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.793255] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "refresh_cache-0a37f623-f757-4f67-a796-a8e17cfb9496" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.793424] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired lock "refresh_cache-0a37f623-f757-4f67-a796-a8e17cfb9496" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.793734] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 699.865327] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 
0a37f623-f757-4f67-a796-a8e17cfb9496] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.918143] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7e103740-734b-4ea4-bd54-4d40a1d21b92 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "29078f8b-7d23-4d10-ab27-88c49ac7fa97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.918460] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7e103740-734b-4ea4-bd54-4d40a1d21b92 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "29078f8b-7d23-4d10-ab27-88c49ac7fa97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.099460] env[61570]: DEBUG nova.compute.manager [req-4a73e50d-1506-4727-8255-a390d68f1d1f req-30c7434c-d551-4d98-8891-5905d8d8fb45 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Received event network-vif-plugged-853a7f2f-2d24-4fc1-80c8-b18b634d943c {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 700.099460] env[61570]: DEBUG oslo_concurrency.lockutils [req-4a73e50d-1506-4727-8255-a390d68f1d1f req-30c7434c-d551-4d98-8891-5905d8d8fb45 service nova] Acquiring lock "0a37f623-f757-4f67-a796-a8e17cfb9496-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.099460] env[61570]: DEBUG oslo_concurrency.lockutils [req-4a73e50d-1506-4727-8255-a390d68f1d1f req-30c7434c-d551-4d98-8891-5905d8d8fb45 service nova] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.099460] env[61570]: DEBUG oslo_concurrency.lockutils [req-4a73e50d-1506-4727-8255-a390d68f1d1f req-30c7434c-d551-4d98-8891-5905d8d8fb45 service nova] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.099638] env[61570]: DEBUG nova.compute.manager [req-4a73e50d-1506-4727-8255-a390d68f1d1f req-30c7434c-d551-4d98-8891-5905d8d8fb45 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] No waiting events found dispatching network-vif-plugged-853a7f2f-2d24-4fc1-80c8-b18b634d943c {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 700.100233] env[61570]: WARNING nova.compute.manager [req-4a73e50d-1506-4727-8255-a390d68f1d1f req-30c7434c-d551-4d98-8891-5905d8d8fb45 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Received unexpected event network-vif-plugged-853a7f2f-2d24-4fc1-80c8-b18b634d943c for instance with vm_state building and task_state spawning. 
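Annotation: the repeated "Inventory has not changed for provider 829dc000-..." records carry the same per-resource-class inventory each cycle. In Placement terms the usable capacity of a class is (total - reserved) * allocation_ratio, so the figures above work out to 192 VCPU, 196078 MB of RAM and 200 GB of disk. A small sketch of that arithmetic using the exact inventory dict from the log; effective_capacity() is a helper written for this note, not a Nova or Placement API.

    # Sketch: effective capacity of the provider inventory reported in the log.
    # Formula: (total - reserved) * allocation_ratio, as used by Placement.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                      'min_unit': 1, 'max_unit': 16,    'step_size': 1},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 96,    'step_size': 1},
    }

    def effective_capacity(inv):
        # Illustrative helper, not a Nova/Placement function.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        # VCPU -> 192.0, MEMORY_MB -> 196078.0, DISK_GB -> 200.0
        print(rc, effective_capacity(inv))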
[ 700.353850] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Updating instance_info_cache with network_info: [{"id": "853a7f2f-2d24-4fc1-80c8-b18b634d943c", "address": "fa:16:3e:4a:9b:16", "network": {"id": "a76c487f-68f6-4384-9312-c08ed0a709d9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-113450026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90aebcb3272478fa4a680a56504d1b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap853a7f2f-2d", "ovs_interfaceid": "853a7f2f-2d24-4fc1-80c8-b18b634d943c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.368542] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Releasing lock "refresh_cache-0a37f623-f757-4f67-a796-a8e17cfb9496" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.369028] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Instance network_info: |[{"id": "853a7f2f-2d24-4fc1-80c8-b18b634d943c", "address": "fa:16:3e:4a:9b:16", "network": {"id": "a76c487f-68f6-4384-9312-c08ed0a709d9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-113450026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90aebcb3272478fa4a680a56504d1b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap853a7f2f-2d", "ovs_interfaceid": "853a7f2f-2d24-4fc1-80c8-b18b634d943c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 700.369864] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:9b:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74e6f6e0-95e6-4531-99e9-0e78350fb655', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '853a7f2f-2d24-4fc1-80c8-b18b634d943c', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.380030] env[61570]: DEBUG oslo.service.loopingcall [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.380252] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 700.380590] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef2f84f1-cc22-4d5d-9c08-870d3eec1eba {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.401198] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.401198] env[61570]: value = "task-4891295" [ 700.401198] env[61570]: _type = "Task" [ 700.401198] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.412898] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891295, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.914183] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891295, 'name': CreateVM_Task, 'duration_secs': 0.331502} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.914481] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 700.917611] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.917611] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.917611] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 700.917611] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9875e71-ff80-4f1a-a139-40f10131733d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.922706] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 700.922706] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]528f4e93-0670-a434-27a6-4940edadd582" [ 700.922706] env[61570]: _type = "Task" [ 700.922706] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.939257] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.939257] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.939257] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.124038] env[61570]: DEBUG nova.compute.manager [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Received event network-changed-853a7f2f-2d24-4fc1-80c8-b18b634d943c {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 702.124327] env[61570]: DEBUG nova.compute.manager [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Refreshing instance network info cache due to event network-changed-853a7f2f-2d24-4fc1-80c8-b18b634d943c. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 702.124701] env[61570]: DEBUG oslo_concurrency.lockutils [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] Acquiring lock "refresh_cache-0a37f623-f757-4f67-a796-a8e17cfb9496" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.124701] env[61570]: DEBUG oslo_concurrency.lockutils [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] Acquired lock "refresh_cache-0a37f623-f757-4f67-a796-a8e17cfb9496" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.125039] env[61570]: DEBUG nova.network.neutron [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Refreshing network info cache for port 853a7f2f-2d24-4fc1-80c8-b18b634d943c {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 702.475695] env[61570]: DEBUG nova.network.neutron [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Updated VIF entry in instance network info cache for port 853a7f2f-2d24-4fc1-80c8-b18b634d943c. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 702.476074] env[61570]: DEBUG nova.network.neutron [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Updating instance_info_cache with network_info: [{"id": "853a7f2f-2d24-4fc1-80c8-b18b634d943c", "address": "fa:16:3e:4a:9b:16", "network": {"id": "a76c487f-68f6-4384-9312-c08ed0a709d9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-113450026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90aebcb3272478fa4a680a56504d1b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap853a7f2f-2d", "ovs_interfaceid": "853a7f2f-2d24-4fc1-80c8-b18b634d943c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.485525] env[61570]: DEBUG oslo_concurrency.lockutils [req-8e8f3f56-8ba8-45fa-8665-012a6fc3019d req-0ae93c05-80f8-4e3f-b69e-0cc8f20a7d42 service nova] Releasing lock "refresh_cache-0a37f623-f757-4f67-a796-a8e17cfb9496" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.754628] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 734.754958] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 734.754958] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 734.780810] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781047] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781256] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781402] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781533] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781638] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781757] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781877] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.781994] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.782200] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 734.782341] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 736.753687] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.753942] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.767888] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.768132] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.768299] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.768453] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 736.769864] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f5358f-7fe1-492f-b6fd-2f5e25abad97 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.778985] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8e4238-b9ce-474f-8343-563395475fbf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.793695] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713e983b-a295-469a-9786-16d22cae61b3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.800927] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ddf284-de8d-4aac-b89a-55e69cdd06cd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.830571] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180584MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 736.830728] env[61570]: DEBUG 
oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.830926] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.929610] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8926a7b1-989f-4290-8828-1e75efbc0553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.929786] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 766812ee-e272-4779-b85d-7fdde876e877 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.929964] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 48b87f3a-879c-4578-90cf-3e4328299e81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.930112] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.930237] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.930356] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.930474] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.930590] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.930704] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.930816] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 736.943154] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 736.954799] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 85acb509-fb8a-4f23-90a9-de4fb12fd5dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 736.964801] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance cec56dfe-ec77-4824-8751-43f85b57c6d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 736.976626] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 771b52c2-234d-47ad-af34-11cf0d68f5e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 736.987697] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a39a01c-4e3f-4031-98a2-2a12c492a2ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 736.999018] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.010438] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7a5f2f6a-db7f-410d-96cf-376be4ef6dc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.022224] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 63eeeab2-6aa3-49c9-b76b-09cc81f8d269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.035028] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8962f2e6-007f-47be-8c56-bb33c8354287 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.046806] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9f25c9d9-4936-4773-8fc3-bf52648752be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.057759] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.068993] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 243873bb-c6d0-4212-8bc6-5512044b9025 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.079079] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.089476] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 966ce8b7-1a10-46f7-b113-e191cdc9a6ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.099721] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 82b9c976-583a-46f1-b412-87dd225dba12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.110259] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fb6a424d-c4b0-4913-a4ef-aa361ff25101 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.120876] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 497d6954-60e5-4a83-932c-c95de38b6f7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.132125] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance abf366b6-380a-4108-b351-8a2f2cceb018 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.144460] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.156199] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 29078f8b-7d23-4d10-ab27-88c49ac7fa97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 737.156472] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 737.156636] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '3', 'num_instances': '10', 'num_vm_building': '10', 'num_task_spawning': '10', 'num_os_type_None': '10', 'num_proj_b65c16386e5e420d9e1ca6fc7aa13b49': '1', 'io_workload': '10', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_075f06a8aabb4d1d8ec26c7b3f341791': '1', 'num_proj_a90ae0f6cf7e45b9a7408bdd6317387e': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_5beaa7046a57489b8b0ca03a79344d08': '1', 'num_proj_779314e2630246b98c8b6a11c3f71890': '1', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '2'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 737.521483] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09bbf09-2067-4a51-be6a-15c7bd364141 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.530401] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73c76b7-3e41-43c6-8b01-88e45db4a081 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.559692] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683c1ab6-6c01-4597-8d48-ddfe1de9fe22 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.567672] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd907479-3df6-419e-b036-3d403e93fce0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.582867] env[61570]: DEBUG nova.compute.provider_tree [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.591835] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 737.608884] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 737.608884] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.778s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.602888] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.754076] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.754076] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.754076] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 739.748875] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.752597] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.752812] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.752942] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.793769] env[61570]: WARNING oslo_vmware.rw_handles [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 743.793769] env[61570]: ERROR oslo_vmware.rw_handles [ 743.795612] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 743.796264] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] 
[instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 743.796602] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Copying Virtual Disk [datastore2] vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/3ee2f585-6875-4c42-9568-e2d3fd2408de/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 743.796957] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cce4e7e8-d7e8-4811-a693-3a7613749968 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.806682] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Waiting for the task: (returnval){ [ 743.806682] env[61570]: value = "task-4891296" [ 743.806682] env[61570]: _type = "Task" [ 743.806682] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.815724] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Task: {'id': task-4891296, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.318763] env[61570]: DEBUG oslo_vmware.exceptions [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 744.318763] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.318763] env[61570]: ERROR nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 744.318763] env[61570]: Faults: ['InvalidArgument'] [ 744.318763] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Traceback (most recent call last): [ 744.318763] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 744.318763] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] yield resources [ 744.318763] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self.driver.spawn(context, instance, image_meta, [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self._vmops.spawn(context, instance, image_meta, injected_files, [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self._fetch_image_if_missing(context, vi) [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] image_cache(vi, tmp_image_ds_loc) [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] vm_util.copy_virtual_disk( [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] session._wait_for_task(vmdk_copy_task) [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 744.319146] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] return self.wait_for_task(task_ref) [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] return evt.wait() [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] result = hub.switch() [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] return self.greenlet.switch() [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self.f(*self.args, **self.kw) [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] raise exceptions.translate_fault(task_info.error) [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Faults: ['InvalidArgument'] [ 744.319556] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] [ 744.319905] env[61570]: INFO nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Terminating instance [ 744.320702] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.321301] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.321301] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ef70939d-ffc7-43c7-8bbd-e38e6c32ef67 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.323542] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 744.323725] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 744.324470] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005aefe3-b3fa-44fa-8bdf-e612bcd52204 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.331777] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 744.332030] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c24b975d-3658-4ae6-94d3-ecaa1ac63606 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.334475] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.334638] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 744.335748] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96e73568-8175-4bd1-b645-f0492b586e3c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.341301] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Waiting for the task: (returnval){ [ 744.341301] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52bcd0d7-e911-65f8-21f1-d97c67279113" [ 744.341301] env[61570]: _type = "Task" [ 744.341301] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.349639] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52bcd0d7-e911-65f8-21f1-d97c67279113, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.408159] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 744.408370] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 744.408529] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Deleting the datastore file [datastore2] 8926a7b1-989f-4290-8828-1e75efbc0553 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 744.408782] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e0f212e-85bb-46c0-9124-8a7778bb70a6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.418018] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Waiting for the task: (returnval){ [ 744.418018] env[61570]: value = "task-4891298" [ 744.418018] env[61570]: _type = "Task" [ 744.418018] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.424554] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Task: {'id': task-4891298, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.853011] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 744.853351] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Creating directory with path [datastore2] vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.853503] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81783ab7-b68c-4a36-baf9-765a76cdebbe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.865890] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Created directory with path [datastore2] vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.866091] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Fetch image to [datastore2] vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 744.866265] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 744.867115] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb77684c-92d7-4df2-a6c9-81a361cfde0f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.876233] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5411a777-7a6c-4a23-953a-42b18850d170 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.888628] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5fdb50-6515-4427-9d0a-947e0700b469 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.922916] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87e89d3-51a2-4804-9eb5-534c731aa4cc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.933298] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e57a102b-889c-4599-af85-c1424319b743 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.935137] env[61570]: DEBUG oslo_vmware.api [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Task: {'id': task-4891298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077135} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.935406] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 744.935589] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 744.935862] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 744.935924] env[61570]: INFO nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 744.938637] env[61570]: DEBUG nova.compute.claims [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 744.938818] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.939041] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.960533] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 745.021012] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 745.085894] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 745.086387] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 745.439819] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fdbe32-2eba-4b18-97b8-5458a8fbb66d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.449882] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa8a4e7-c967-4b7c-9d8b-6321d2fcd801 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.481700] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25919257-91f4-4140-8039-662200acbb66 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.490829] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc92e13-8b10-41db-b8a3-e2946fa11f24 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.505511] env[61570]: DEBUG nova.compute.provider_tree [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.516195] env[61570]: DEBUG nova.scheduler.client.report [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.536494] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.597s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.537192] env[61570]: ERROR nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 745.537192] env[61570]: Faults: ['InvalidArgument'] [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Traceback (most recent call last): [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self.driver.spawn(context, instance, image_meta, [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self._fetch_image_if_missing(context, vi) [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] image_cache(vi, tmp_image_ds_loc) [ 745.537192] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] vm_util.copy_virtual_disk( [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] session._wait_for_task(vmdk_copy_task) [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] return self.wait_for_task(task_ref) [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] return evt.wait() [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] result = hub.switch() [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] return self.greenlet.switch() [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 745.537519] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] self.f(*self.args, **self.kw) [ 745.537826] env[61570]: ERROR nova.compute.manager [instance: 
8926a7b1-989f-4290-8828-1e75efbc0553] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 745.537826] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] raise exceptions.translate_fault(task_info.error) [ 745.537826] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 745.537826] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Faults: ['InvalidArgument'] [ 745.537826] env[61570]: ERROR nova.compute.manager [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] [ 745.537944] env[61570]: DEBUG nova.compute.utils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 745.539958] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Build of instance 8926a7b1-989f-4290-8828-1e75efbc0553 was re-scheduled: A specified parameter was not correct: fileType [ 745.539958] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 745.540360] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 745.540530] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 745.540682] env[61570]: DEBUG nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 745.540844] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 745.937641] env[61570]: DEBUG nova.network.neutron [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.951386] env[61570]: INFO nova.compute.manager [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] [instance: 8926a7b1-989f-4290-8828-1e75efbc0553] Took 0.41 seconds to deallocate network for instance. [ 746.057623] env[61570]: INFO nova.scheduler.client.report [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Deleted allocations for instance 8926a7b1-989f-4290-8828-1e75efbc0553 [ 746.085787] env[61570]: DEBUG oslo_concurrency.lockutils [None req-78cd708d-2173-4c46-af3e-dd73ef8e4c4e tempest-ServerDiagnosticsNegativeTest-202714582 tempest-ServerDiagnosticsNegativeTest-202714582-project-member] Lock "8926a7b1-989f-4290-8828-1e75efbc0553" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.097s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.101998] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 746.158074] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.158074] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.159804] env[61570]: INFO nova.compute.claims [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.393911] env[61570]: DEBUG oslo_concurrency.lockutils [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "766812ee-e272-4779-b85d-7fdde876e877" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.600304] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c147f32-6b5a-46a0-b480-ade17c8af84c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.609637] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7122b009-b08d-4d1e-8faf-68ed3cc8fb63 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.640210] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414b1f07-07c7-4245-93b7-af29e21fba48 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.648270] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76a7b86-4f19-4b9c-8dae-5fea4e01cecd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.662508] env[61570]: DEBUG nova.compute.provider_tree [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.670764] env[61570]: DEBUG nova.scheduler.client.report [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 746.686030] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.528s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.686479] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 746.722844] env[61570]: DEBUG nova.compute.utils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.725240] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 746.725240] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 746.733956] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Start building block device mappings for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 746.798025] env[61570]: DEBUG nova.policy [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11e65eaa601f4ce0b703bc43904ce00e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf6825d6d7de4a6f88c5aa497feacb1c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 746.805554] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 746.836567] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.836834] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.836990] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.837196] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.837347] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 746.837495] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.837768] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.837941] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.838122] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.838290] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.838464] env[61570]: DEBUG nova.virt.hardware [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.839352] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f102d9b2-710d-4ba1-8dec-59b31a730eeb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.849978] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1ff957-0cc0-4300-9891-aa0288097f80 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.228264] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Successfully created port: 72dd0198-fcee-4733-9221-9e30697cd26d {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.262719] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Successfully updated port: 72dd0198-fcee-4733-9221-9e30697cd26d {{(pid=61570) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 748.297716] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "refresh_cache-1435e51b-58b8-406f-9def-f9e6e7bffd8a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.297716] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquired lock "refresh_cache-1435e51b-58b8-406f-9def-f9e6e7bffd8a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.297716] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.353596] env[61570]: DEBUG nova.compute.manager [req-38805ddc-b704-44ad-b436-1b4cee10de11 req-d0dcfeb5-89ef-4067-9efa-5d170fa87d5e service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Received event network-vif-plugged-72dd0198-fcee-4733-9221-9e30697cd26d {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 748.353810] env[61570]: DEBUG oslo_concurrency.lockutils [req-38805ddc-b704-44ad-b436-1b4cee10de11 req-d0dcfeb5-89ef-4067-9efa-5d170fa87d5e service nova] Acquiring lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.354116] env[61570]: DEBUG oslo_concurrency.lockutils [req-38805ddc-b704-44ad-b436-1b4cee10de11 req-d0dcfeb5-89ef-4067-9efa-5d170fa87d5e service nova] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.354193] env[61570]: DEBUG oslo_concurrency.lockutils [req-38805ddc-b704-44ad-b436-1b4cee10de11 req-d0dcfeb5-89ef-4067-9efa-5d170fa87d5e service nova] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.354362] env[61570]: DEBUG nova.compute.manager [req-38805ddc-b704-44ad-b436-1b4cee10de11 req-d0dcfeb5-89ef-4067-9efa-5d170fa87d5e service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] No waiting events found dispatching network-vif-plugged-72dd0198-fcee-4733-9221-9e30697cd26d {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 748.354517] env[61570]: WARNING nova.compute.manager [req-38805ddc-b704-44ad-b436-1b4cee10de11 req-d0dcfeb5-89ef-4067-9efa-5d170fa87d5e service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Received unexpected event network-vif-plugged-72dd0198-fcee-4733-9221-9e30697cd26d for instance with vm_state building and task_state spawning. 
[ 748.554809] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.853999] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Updating instance_info_cache with network_info: [{"id": "72dd0198-fcee-4733-9221-9e30697cd26d", "address": "fa:16:3e:1c:36:3f", "network": {"id": "38991865-72f5-4f54-96b0-848dd5ddc603", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1563506533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf6825d6d7de4a6f88c5aa497feacb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72dd0198-fc", "ovs_interfaceid": "72dd0198-fcee-4733-9221-9e30697cd26d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.868236] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Releasing lock "refresh_cache-1435e51b-58b8-406f-9def-f9e6e7bffd8a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.868553] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Instance network_info: |[{"id": "72dd0198-fcee-4733-9221-9e30697cd26d", "address": "fa:16:3e:1c:36:3f", "network": {"id": "38991865-72f5-4f54-96b0-848dd5ddc603", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1563506533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf6825d6d7de4a6f88c5aa497feacb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap72dd0198-fc", "ovs_interfaceid": "72dd0198-fcee-4733-9221-9e30697cd26d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 748.868977] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:36:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72dd0198-fcee-4733-9221-9e30697cd26d', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.876763] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Creating folder: Project (cf6825d6d7de4a6f88c5aa497feacb1c). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.878112] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58fb2390-8b9f-431e-86cc-ea2a2bc12696 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.889306] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Created folder: Project (cf6825d6d7de4a6f88c5aa497feacb1c) in parent group-v953072. [ 748.890186] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Creating folder: Instances. Parent ref: group-v953114. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.890186] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5a7abcf-a57d-4144-90e4-c24915320764 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.900034] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Created folder: Instances in parent group-v953114. [ 748.900034] env[61570]: DEBUG oslo.service.loopingcall [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.900182] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 748.900841] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d8e3aa4-198b-4d2e-8d03-3d6e9781c9d8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.921565] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.921565] env[61570]: value = "task-4891301" [ 748.921565] env[61570]: _type = "Task" [ 748.921565] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.930628] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891301, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.432114] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891301, 'name': CreateVM_Task, 'duration_secs': 0.347917} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.432502] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 749.433142] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.433364] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.433763] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.434083] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d32238a-7c98-41bb-9797-6f450ecacd46 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.439763] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Waiting for the task: (returnval){ [ 749.439763] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52009f86-41ff-3770-d604-ca2d45fd35db" [ 749.439763] env[61570]: _type = "Task" [ 749.439763] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.449497] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52009f86-41ff-3770-d604-ca2d45fd35db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.952029] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.952405] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.952606] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.516857] env[61570]: DEBUG nova.compute.manager [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Received event network-changed-72dd0198-fcee-4733-9221-9e30697cd26d {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 750.517207] env[61570]: DEBUG nova.compute.manager [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Refreshing instance network info cache due to event network-changed-72dd0198-fcee-4733-9221-9e30697cd26d. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 750.517315] env[61570]: DEBUG oslo_concurrency.lockutils [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] Acquiring lock "refresh_cache-1435e51b-58b8-406f-9def-f9e6e7bffd8a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.517459] env[61570]: DEBUG oslo_concurrency.lockutils [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] Acquired lock "refresh_cache-1435e51b-58b8-406f-9def-f9e6e7bffd8a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.517618] env[61570]: DEBUG nova.network.neutron [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Refreshing network info cache for port 72dd0198-fcee-4733-9221-9e30697cd26d {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 750.901409] env[61570]: DEBUG nova.network.neutron [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Updated VIF entry in instance network info cache for port 72dd0198-fcee-4733-9221-9e30697cd26d. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 750.901785] env[61570]: DEBUG nova.network.neutron [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Updating instance_info_cache with network_info: [{"id": "72dd0198-fcee-4733-9221-9e30697cd26d", "address": "fa:16:3e:1c:36:3f", "network": {"id": "38991865-72f5-4f54-96b0-848dd5ddc603", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1563506533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf6825d6d7de4a6f88c5aa497feacb1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72dd0198-fc", "ovs_interfaceid": "72dd0198-fcee-4733-9221-9e30697cd26d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.913269] env[61570]: DEBUG oslo_concurrency.lockutils [req-44f49d8a-0fdb-4b84-93bc-0a63f36d2323 req-d8a6590c-2793-41b3-9f02-ea46dda7d0d7 service nova] Releasing lock "refresh_cache-1435e51b-58b8-406f-9def-f9e6e7bffd8a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.627817] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 
tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "48b87f3a-879c-4578-90cf-3e4328299e81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.175135] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.826684] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.590384] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "ded35886-716c-4725-8fc9-cd6dfc04281a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.816964] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.817210] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.219527] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.219527] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 756.481725] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "12435283-c350-4d85-be82-1c85e1ea17be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.846943] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.255020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "0a37f623-f757-4f67-a796-a8e17cfb9496" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.404446] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.590435] env[61570]: DEBUG oslo_concurrency.lockutils [None req-36179c56-713d-4ff7-987d-775f4d0a69f6 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] Acquiring lock "43599711-7de0-465c-a8ab-fc24d90ed9c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.590803] env[61570]: DEBUG oslo_concurrency.lockutils [None req-36179c56-713d-4ff7-987d-775f4d0a69f6 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] Lock "43599711-7de0-465c-a8ab-fc24d90ed9c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.897877] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d0dbfaea-127d-42c5-a05d-a12943d26870 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] Acquiring lock "f3914246-a7b8-46d3-b8c1-3c7254a30693" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.898256] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d0dbfaea-127d-42c5-a05d-a12943d26870 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] Lock "f3914246-a7b8-46d3-b8c1-3c7254a30693" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.812487] env[61570]: WARNING oslo_vmware.rw_handles [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 793.812487] env[61570]: ERROR oslo_vmware.rw_handles [ 793.813826] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 793.815838] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 793.816158] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Copying Virtual Disk [datastore2] vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/c1e5377e-85ac-4b09-a03f-176fba5ac5da/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 793.817425] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8acc1a85-87dd-4c80-89a2-1d3150a304f3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.828574] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 
tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Waiting for the task: (returnval){ [ 793.828574] env[61570]: value = "task-4891302" [ 793.828574] env[61570]: _type = "Task" [ 793.828574] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.839376] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Task: {'id': task-4891302, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.346740] env[61570]: DEBUG oslo_vmware.exceptions [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 794.346740] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.346740] env[61570]: ERROR nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 794.346740] env[61570]: Faults: ['InvalidArgument'] [ 794.346740] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Traceback (most recent call last): [ 794.346740] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 794.346740] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] yield resources [ 794.346740] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self.driver.spawn(context, instance, image_meta, [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self._fetch_image_if_missing(context, vi) [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 
48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] image_cache(vi, tmp_image_ds_loc) [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] vm_util.copy_virtual_disk( [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] session._wait_for_task(vmdk_copy_task) [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 794.347129] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] return self.wait_for_task(task_ref) [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] return evt.wait() [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] result = hub.switch() [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] return self.greenlet.switch() [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self.f(*self.args, **self.kw) [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] raise exceptions.translate_fault(task_info.error) [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Faults: ['InvalidArgument'] [ 794.347485] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] [ 794.347830] env[61570]: INFO nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] 
[instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Terminating instance [ 794.349151] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.349977] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.351131] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 794.351675] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 794.352079] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9794ce7c-870b-43ad-b52e-13417e96bad9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.355115] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe55bb59-01e7-4641-b571-441faa9829dc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.366021] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 794.367071] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0425505-7986-440d-a73d-7dd503dd0512 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.370254] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.370254] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 794.371391] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-959400f4-e8f2-414a-8699-768c300f0ac7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.379413] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 794.379413] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ad357a-0d8e-a067-80d8-fdae15e4dc0e" [ 794.379413] env[61570]: _type = "Task" [ 794.379413] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.388422] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ad357a-0d8e-a067-80d8-fdae15e4dc0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.456995] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 794.456995] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 794.456995] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Deleting the datastore file [datastore2] 48b87f3a-879c-4578-90cf-3e4328299e81 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.456995] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d234dd0-f74f-4b0e-8895-ab99582d85fc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.463394] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Waiting for the task: (returnval){ [ 794.463394] env[61570]: value = "task-4891304" [ 794.463394] env[61570]: _type = "Task" [ 794.463394] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.475231] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Task: {'id': task-4891304, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.753461] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.753639] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 794.769576] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] There are 0 instances to clean {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 794.770477] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.770635] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances with incomplete migration {{(pid=61570) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 794.800164] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3109278e-54da-42f5-af5c-3800572a6f1a tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "1c76ec6f-08e0-4786-bcac-70fbc87fc789" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.800511] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3109278e-54da-42f5-af5c-3800572a6f1a tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "1c76ec6f-08e0-4786-bcac-70fbc87fc789" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.801802] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.897157] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 794.897510] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating directory with path [datastore2] vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.897510] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d28dea92-c58c-4667-b022-acbfa273d470 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.911895] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Created directory with path [datastore2] vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.912140] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Fetch image to [datastore2] vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 794.912430] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 794.913376] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113bb60e-c8d1-43e1-9505-8add8ab6d27b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.924025] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e46718-e785-48da-8f22-8b11a6c77f3d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.937257] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fe2f94-9cb7-45a4-b18b-676203ba34c5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.978115] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414cf492-c410-4da2-adef-ba822253cbb0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.991397] env[61570]: DEBUG oslo_vmware.api [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Task: {'id': task-4891304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073166} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.992524] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.992635] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 794.992896] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 794.993206] env[61570]: INFO nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Took 0.64 seconds to destroy the instance on the hypervisor. [ 794.995576] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9c64f817-fa84-4f34-941d-f78696e0bb81 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.999314] env[61570]: DEBUG nova.compute.claims [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 794.999355] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.999737] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.033445] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 795.106124] env[61570]: DEBUG oslo_vmware.rw_handles [None 
req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 795.170223] env[61570]: DEBUG oslo_vmware.rw_handles [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 795.170223] env[61570]: DEBUG oslo_vmware.rw_handles [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 795.575757] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9353b26f-e42c-4846-b764-bebdd9d7206e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.587227] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2a0378-edd9-4252-ab7d-220675046f86 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.627888] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2937f263-6bfb-4947-b520-7233e0d0cd0d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.633804] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038cf4de-5627-47f7-99b9-64dca1363ec3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.652089] env[61570]: DEBUG nova.compute.provider_tree [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.665810] env[61570]: DEBUG nova.scheduler.client.report [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 795.704187] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.704s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.704799] env[61570]: ERROR nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 795.704799] env[61570]: Faults: ['InvalidArgument'] [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Traceback (most recent call last): [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self.driver.spawn(context, instance, image_meta, [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self._fetch_image_if_missing(context, vi) [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] image_cache(vi, tmp_image_ds_loc) [ 795.704799] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] vm_util.copy_virtual_disk( [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] session._wait_for_task(vmdk_copy_task) [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] return self.wait_for_task(task_ref) [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 
48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] return evt.wait() [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] result = hub.switch() [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] return self.greenlet.switch() [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 795.705277] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] self.f(*self.args, **self.kw) [ 795.705587] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 795.705587] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] raise exceptions.translate_fault(task_info.error) [ 795.705587] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 795.705587] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Faults: ['InvalidArgument'] [ 795.705587] env[61570]: ERROR nova.compute.manager [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] [ 795.705716] env[61570]: DEBUG nova.compute.utils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 795.710160] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Build of instance 48b87f3a-879c-4578-90cf-3e4328299e81 was re-scheduled: A specified parameter was not correct: fileType [ 795.710160] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 795.710160] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 795.710160] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Virt driver does not 
provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 795.710160] env[61570]: DEBUG nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 795.710332] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 795.814669] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.814669] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 795.814798] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 795.846527] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.846527] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.846527] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.846527] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.846527] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.846848] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.846848] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.847182] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.847182] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 795.847182] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 796.074738] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35c5833e-5987-47b2-9d2e-3e5db7f4653b tempest-ServerRescueTestJSONUnderV235-1606670301 tempest-ServerRescueTestJSONUnderV235-1606670301-project-member] Acquiring lock "94e9909d-31a5-4d2c-a12b-aaebb32d4445" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.075054] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35c5833e-5987-47b2-9d2e-3e5db7f4653b tempest-ServerRescueTestJSONUnderV235-1606670301 tempest-ServerRescueTestJSONUnderV235-1606670301-project-member] Lock "94e9909d-31a5-4d2c-a12b-aaebb32d4445" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.322814] env[61570]: DEBUG nova.network.neutron [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.343672] env[61570]: INFO nova.compute.manager [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Took 0.63 seconds to deallocate network for instance. 
[ 796.541231] env[61570]: INFO nova.scheduler.client.report [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Deleted allocations for instance 48b87f3a-879c-4578-90cf-3e4328299e81 [ 796.567177] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3aed01b2-2916-4a85-a8ba-613335acf322 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "48b87f3a-879c-4578-90cf-3e4328299e81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 244.290s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.567505] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "48b87f3a-879c-4578-90cf-3e4328299e81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 44.940s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.567842] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Acquiring lock "48b87f3a-879c-4578-90cf-3e4328299e81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.568013] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "48b87f3a-879c-4578-90cf-3e4328299e81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.568276] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "48b87f3a-879c-4578-90cf-3e4328299e81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.571939] env[61570]: INFO nova.compute.manager [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Terminating instance [ 796.574032] env[61570]: DEBUG nova.compute.manager [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 796.574032] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 796.574265] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44a89a03-37fb-442b-8aab-488e992f4853 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.584432] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffdf847-53c7-40b3-994c-1fca93a4219a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.607596] env[61570]: DEBUG nova.compute.manager [None req-bba7f229-b350-4365-aa2b-40e5bd4d8ac1 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] [instance: 85acb509-fb8a-4f23-90a9-de4fb12fd5dc] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 796.624680] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 48b87f3a-879c-4578-90cf-3e4328299e81 could not be found. [ 796.625139] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 796.626034] env[61570]: INFO nova.compute.manager [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Took 0.05 seconds to destroy the instance on the hypervisor. [ 796.626034] env[61570]: DEBUG oslo.service.loopingcall [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.626451] env[61570]: DEBUG nova.compute.manager [-] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 796.626600] env[61570]: DEBUG nova.network.neutron [-] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 796.647409] env[61570]: DEBUG nova.compute.manager [None req-bba7f229-b350-4365-aa2b-40e5bd4d8ac1 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] [instance: 85acb509-fb8a-4f23-90a9-de4fb12fd5dc] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 796.657157] env[61570]: DEBUG nova.network.neutron [-] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.666976] env[61570]: INFO nova.compute.manager [-] [instance: 48b87f3a-879c-4578-90cf-3e4328299e81] Took 0.04 seconds to deallocate network for instance. [ 796.685387] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bba7f229-b350-4365-aa2b-40e5bd4d8ac1 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Lock "85acb509-fb8a-4f23-90a9-de4fb12fd5dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.503s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.695963] env[61570]: DEBUG nova.compute.manager [None req-254e8122-3278-48f1-8d1c-c16e4c21d0c5 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] [instance: cec56dfe-ec77-4824-8751-43f85b57c6d2] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 796.780187] env[61570]: DEBUG nova.compute.manager [None req-254e8122-3278-48f1-8d1c-c16e4c21d0c5 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] [instance: cec56dfe-ec77-4824-8751-43f85b57c6d2] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 796.814148] env[61570]: DEBUG oslo_concurrency.lockutils [None req-254e8122-3278-48f1-8d1c-c16e4c21d0c5 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] Lock "cec56dfe-ec77-4824-8751-43f85b57c6d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.225s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.825443] env[61570]: DEBUG nova.compute.manager [None req-34d8d5b4-e887-4d72-a038-cdf2af5118e7 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] [instance: 771b52c2-234d-47ad-af34-11cf0d68f5e1] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 796.874081] env[61570]: DEBUG nova.compute.manager [None req-34d8d5b4-e887-4d72-a038-cdf2af5118e7 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] [instance: 771b52c2-234d-47ad-af34-11cf0d68f5e1] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 796.892863] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3b942ae-de56-405b-8a26-e293222dae40 tempest-FloatingIPsAssociationTestJSON-1246425191 tempest-FloatingIPsAssociationTestJSON-1246425191-project-member] Lock "48b87f3a-879c-4578-90cf-3e4328299e81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.325s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.916029] env[61570]: DEBUG oslo_concurrency.lockutils [None req-34d8d5b4-e887-4d72-a038-cdf2af5118e7 tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Lock "771b52c2-234d-47ad-af34-11cf0d68f5e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.287s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.931852] env[61570]: DEBUG nova.compute.manager [None req-f18f0c08-1ebb-4287-9104-72ac1342a1a8 tempest-ServersWithSpecificFlavorTestJSON-198724013 tempest-ServersWithSpecificFlavorTestJSON-198724013-project-member] [instance: 0a39a01c-4e3f-4031-98a2-2a12c492a2ea] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 796.979295] env[61570]: DEBUG nova.compute.manager [None req-f18f0c08-1ebb-4287-9104-72ac1342a1a8 tempest-ServersWithSpecificFlavorTestJSON-198724013 tempest-ServersWithSpecificFlavorTestJSON-198724013-project-member] [instance: 0a39a01c-4e3f-4031-98a2-2a12c492a2ea] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.011291] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f18f0c08-1ebb-4287-9104-72ac1342a1a8 tempest-ServersWithSpecificFlavorTestJSON-198724013 tempest-ServersWithSpecificFlavorTestJSON-198724013-project-member] Lock "0a39a01c-4e3f-4031-98a2-2a12c492a2ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.165s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.029210] env[61570]: DEBUG nova.compute.manager [None req-a551d30d-e931-460b-ae62-c9d0a8c0f43e tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] [instance: 76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.061105] env[61570]: DEBUG nova.compute.manager [None req-a551d30d-e931-460b-ae62-c9d0a8c0f43e tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] [instance: 76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.130030] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a551d30d-e931-460b-ae62-c9d0a8c0f43e tempest-ListServerFiltersTestJSON-483714309 tempest-ListServerFiltersTestJSON-483714309-project-member] Lock "76e1cf2e-74c6-408b-9d9c-cd04d8e2b4c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.801s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.146339] env[61570]: DEBUG nova.compute.manager [None req-c5bf6c36-1fd3-46f3-aea4-e42bdfd7d749 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] [instance: 7a5f2f6a-db7f-410d-96cf-376be4ef6dc5] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.179025] env[61570]: DEBUG nova.compute.manager [None req-c5bf6c36-1fd3-46f3-aea4-e42bdfd7d749 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] [instance: 7a5f2f6a-db7f-410d-96cf-376be4ef6dc5] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.207672] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c5bf6c36-1fd3-46f3-aea4-e42bdfd7d749 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] Lock "7a5f2f6a-db7f-410d-96cf-376be4ef6dc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.831s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.219977] env[61570]: DEBUG nova.compute.manager [None req-91511a3d-5e93-49cb-881f-ecf7b421fa59 tempest-ServerDiagnosticsV248Test-1075744459 tempest-ServerDiagnosticsV248Test-1075744459-project-member] [instance: 63eeeab2-6aa3-49c9-b76b-09cc81f8d269] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.250024] env[61570]: DEBUG nova.compute.manager [None req-91511a3d-5e93-49cb-881f-ecf7b421fa59 tempest-ServerDiagnosticsV248Test-1075744459 tempest-ServerDiagnosticsV248Test-1075744459-project-member] [instance: 63eeeab2-6aa3-49c9-b76b-09cc81f8d269] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.305219] env[61570]: DEBUG oslo_concurrency.lockutils [None req-91511a3d-5e93-49cb-881f-ecf7b421fa59 tempest-ServerDiagnosticsV248Test-1075744459 tempest-ServerDiagnosticsV248Test-1075744459-project-member] Lock "63eeeab2-6aa3-49c9-b76b-09cc81f8d269" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.655s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.337192] env[61570]: DEBUG nova.compute.manager [None req-bfa8afda-590c-4c92-ae7f-633518142711 tempest-ServerExternalEventsTest-554257046 tempest-ServerExternalEventsTest-554257046-project-member] [instance: 8962f2e6-007f-47be-8c56-bb33c8354287] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.372024] env[61570]: DEBUG nova.compute.manager [None req-bfa8afda-590c-4c92-ae7f-633518142711 tempest-ServerExternalEventsTest-554257046 tempest-ServerExternalEventsTest-554257046-project-member] [instance: 8962f2e6-007f-47be-8c56-bb33c8354287] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.403882] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bfa8afda-590c-4c92-ae7f-633518142711 tempest-ServerExternalEventsTest-554257046 tempest-ServerExternalEventsTest-554257046-project-member] Lock "8962f2e6-007f-47be-8c56-bb33c8354287" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.032s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.422813] env[61570]: DEBUG nova.compute.manager [None req-54a4f1c4-b46a-4f4b-b842-5a00b3348184 tempest-ServerActionsTestOtherA-2122794491 tempest-ServerActionsTestOtherA-2122794491-project-member] [instance: 9f25c9d9-4936-4773-8fc3-bf52648752be] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.460586] env[61570]: DEBUG nova.compute.manager [None req-54a4f1c4-b46a-4f4b-b842-5a00b3348184 tempest-ServerActionsTestOtherA-2122794491 tempest-ServerActionsTestOtherA-2122794491-project-member] [instance: 9f25c9d9-4936-4773-8fc3-bf52648752be] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.490490] env[61570]: DEBUG oslo_concurrency.lockutils [None req-54a4f1c4-b46a-4f4b-b842-5a00b3348184 tempest-ServerActionsTestOtherA-2122794491 tempest-ServerActionsTestOtherA-2122794491-project-member] Lock "9f25c9d9-4936-4773-8fc3-bf52648752be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.846s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.503667] env[61570]: DEBUG nova.compute.manager [None req-ccf76edd-2927-4ff4-80bc-5750b5ba77ee tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.533627] env[61570]: DEBUG nova.compute.manager [None req-ccf76edd-2927-4ff4-80bc-5750b5ba77ee tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.567445] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ccf76edd-2927-4ff4-80bc-5750b5ba77ee tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8dfc1050-3c8f-43fc-b51a-c7b5d3c875ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.433s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.584760] env[61570]: DEBUG nova.compute.manager [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.657166] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.659027] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.659884] env[61570]: INFO nova.compute.claims [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 797.689175] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.031s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.689992] env[61570]: DEBUG nova.compute.utils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Instance 243873bb-c6d0-4212-8bc6-5512044b9025 could not be found. {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 797.694884] env[61570]: DEBUG nova.compute.manager [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Instance disappeared during build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}} [ 797.695087] env[61570]: DEBUG nova.compute.manager [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 797.695317] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Acquiring lock "refresh_cache-243873bb-c6d0-4212-8bc6-5512044b9025" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.695462] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Acquired lock "refresh_cache-243873bb-c6d0-4212-8bc6-5512044b9025" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.695627] env[61570]: DEBUG nova.network.neutron [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 797.705370] env[61570]: DEBUG nova.compute.utils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Can not refresh info_cache because instance was not found {{(pid=61570) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 797.744749] env[61570]: DEBUG nova.network.neutron [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.754531] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.770692] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.771361] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.771733] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.772251] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 797.773314] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20893646-8ffe-457a-b688-f65e78239e7b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.787816] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59dbb7e-6b01-4c83-9247-9db42940af6b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.810656] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf7ff15-5db5-453d-875b-5067123bc9ba {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.819077] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f68a2ce-f751-4704-b14d-9e5429ed0313 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.856997] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180600MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 797.857231] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.857470] 
env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.935984] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 766812ee-e272-4779-b85d-7fdde876e877 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936183] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936309] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936430] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936604] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936659] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936767] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936880] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.936993] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 797.949924] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 797.961590] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 966ce8b7-1a10-46f7-b113-e191cdc9a6ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 797.972690] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 82b9c976-583a-46f1-b412-87dd225dba12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 797.984858] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fb6a424d-c4b0-4913-a4ef-aa361ff25101 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.000103] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 497d6954-60e5-4a83-932c-c95de38b6f7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.014079] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance abf366b6-380a-4108-b351-8a2f2cceb018 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.026765] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.039662] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 29078f8b-7d23-4d10-ab27-88c49ac7fa97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.044680] env[61570]: DEBUG nova.network.neutron [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.056346] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.078452] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Releasing lock "refresh_cache-243873bb-c6d0-4212-8bc6-5512044b9025" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.078452] env[61570]: DEBUG nova.compute.manager [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 798.078452] env[61570]: DEBUG nova.compute.manager [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 798.078674] env[61570]: DEBUG nova.network.neutron [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 798.083625] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.097386] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 43599711-7de0-465c-a8ab-fc24d90ed9c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.109609] env[61570]: DEBUG nova.network.neutron [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 798.111750] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3914246-a7b8-46d3-b8c1-3c7254a30693 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.119367] env[61570]: DEBUG nova.network.neutron [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.123961] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 94e9909d-31a5-4d2c-a12b-aaebb32d4445 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 798.124383] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 798.124551] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '15', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '8', 'num_os_type_None': '9', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'io_workload': '9', 'num_proj_a90ae0f6cf7e45b9a7408bdd6317387e': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_5beaa7046a57489b8b0ca03a79344d08': '1', 'num_proj_779314e2630246b98c8b6a11c3f71890': '1', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1', 'num_task_spawning': '1', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '2', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 798.134427] env[61570]: INFO nova.compute.manager [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] [instance: 243873bb-c6d0-4212-8bc6-5512044b9025] Took 0.06 seconds to deallocate network for instance. [ 798.251668] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2a2b7b59-2045-4272-8a18-121b94eae98a tempest-ServersTestBootFromVolume-901424769 tempest-ServersTestBootFromVolume-901424769-project-member] Lock "243873bb-c6d0-4212-8bc6-5512044b9025" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.803s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.275952] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 798.372989] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.610551] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a6c883-17f4-4aa8-af2b-f5e5e8842f22 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.623810] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17b2ebe-53c9-4447-b3f8-63fc2c8b310c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.663034] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701bb68d-37cc-43b3-85e4-ebb7a1330d6a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.669845] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761c75a1-fe1b-467b-b517-a0b7b792ee11 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.686456] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.705595] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.731838] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 798.731838] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.873s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.731838] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.358s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.732149] env[61570]: INFO nova.compute.claims [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.238633] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d005732b-1869-4f76-aaf1-1c3f59f6d1e7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.249698] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abeb0e1f-6aa1-429e-80a0-35c4a7b24194 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.284676] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b830cd6-5ca5-45f5-9ecd-2c30968878c5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.295114] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34628eb-a039-4845-930c-ed398a6cba3b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.312661] env[61570]: DEBUG nova.compute.provider_tree [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.326032] env[61570]: DEBUG nova.scheduler.client.report [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.359261] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.629s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.360017] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 799.423036] env[61570]: DEBUG nova.compute.utils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.425513] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 799.425729] env[61570]: DEBUG nova.network.neutron [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 799.451968] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 799.555525] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 799.579540] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "8ef1d751-e809-46e0-b98f-ac90ab076889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.579786] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.594794] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.595037] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.595192] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.595367] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.595508] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.595653] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.595855] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.596023] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 799.596190] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.596348] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.596517] env[61570]: DEBUG nova.virt.hardware [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.598025] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f31c08-72d4-408a-a509-4beadbe0fdda {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.608596] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086aa805-6c9c-41c1-8db4-b0a44deef590 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.626941] env[61570]: DEBUG nova.policy [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '656f0afcb6c741978973ee83a1b28a76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5922737c204d481fb40713877b5f46f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 799.734287] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.734536] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f 
None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.734702] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.734847] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 799.748879] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.752621] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.728332] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.752624] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.752807] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.915839] env[61570]: DEBUG nova.network.neutron [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Successfully created port: 3a75fb31-58b0-4547-95af-af9a90f7375e {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.060292] env[61570]: DEBUG nova.network.neutron [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Successfully updated port: 3a75fb31-58b0-4547-95af-af9a90f7375e {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 803.101087] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.101450] env[61570]: DEBUG 
oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquired lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.101529] env[61570]: DEBUG nova.network.neutron [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.178505] env[61570]: DEBUG nova.network.neutron [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.838713] env[61570]: DEBUG nova.network.neutron [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Updating instance_info_cache with network_info: [{"id": "3a75fb31-58b0-4547-95af-af9a90f7375e", "address": "fa:16:3e:5e:df:85", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a75fb31-58", "ovs_interfaceid": "3a75fb31-58b0-4547-95af-af9a90f7375e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.855705] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Releasing lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.856153] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Instance network_info: |[{"id": "3a75fb31-58b0-4547-95af-af9a90f7375e", "address": "fa:16:3e:5e:df:85", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a75fb31-58", "ovs_interfaceid": "3a75fb31-58b0-4547-95af-af9a90f7375e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.856500] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:df:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a75fb31-58b0-4547-95af-af9a90f7375e', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.871132] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Creating folder: Project (5922737c204d481fb40713877b5f46f5). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.871132] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f33209b-dd41-4201-adb9-458484c9f447 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.884181] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Created folder: Project (5922737c204d481fb40713877b5f46f5) in parent group-v953072. [ 803.884985] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Creating folder: Instances. Parent ref: group-v953117. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.884985] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f4e6cd5-6cdd-4188-a047-1ca872f0b3ff {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.897555] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Created folder: Instances in parent group-v953117. 
[ 803.898748] env[61570]: DEBUG oslo.service.loopingcall [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.898748] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 803.898748] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a7b4e5a-141f-48ce-a3ca-2a45cfa0a923 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.922440] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.922440] env[61570]: value = "task-4891307" [ 803.922440] env[61570]: _type = "Task" [ 803.922440] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.931567] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891307, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.016586] env[61570]: DEBUG nova.compute.manager [req-86a82ff6-fa28-4ca4-9ce1-96af51f6e520 req-27001d32-e738-40b1-b397-1381446a8936 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Received event network-vif-plugged-3a75fb31-58b0-4547-95af-af9a90f7375e {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 804.016586] env[61570]: DEBUG oslo_concurrency.lockutils [req-86a82ff6-fa28-4ca4-9ce1-96af51f6e520 req-27001d32-e738-40b1-b397-1381446a8936 service nova] Acquiring lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.016586] env[61570]: DEBUG oslo_concurrency.lockutils [req-86a82ff6-fa28-4ca4-9ce1-96af51f6e520 req-27001d32-e738-40b1-b397-1381446a8936 service nova] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.016586] env[61570]: DEBUG oslo_concurrency.lockutils [req-86a82ff6-fa28-4ca4-9ce1-96af51f6e520 req-27001d32-e738-40b1-b397-1381446a8936 service nova] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.016734] env[61570]: DEBUG nova.compute.manager [req-86a82ff6-fa28-4ca4-9ce1-96af51f6e520 req-27001d32-e738-40b1-b397-1381446a8936 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] No waiting events found dispatching network-vif-plugged-3a75fb31-58b0-4547-95af-af9a90f7375e {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 804.016734] env[61570]: WARNING nova.compute.manager [req-86a82ff6-fa28-4ca4-9ce1-96af51f6e520 req-27001d32-e738-40b1-b397-1381446a8936 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Received unexpected 
event network-vif-plugged-3a75fb31-58b0-4547-95af-af9a90f7375e for instance with vm_state building and task_state deleting. [ 804.435724] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891307, 'name': CreateVM_Task, 'duration_secs': 0.399759} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.435968] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 804.436602] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.436770] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.437153] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 804.437423] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6509626-67c1-4e3b-8cc7-5f8b13460303 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.445154] env[61570]: DEBUG oslo_vmware.api [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Waiting for the task: (returnval){ [ 804.445154] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52d0024d-9d56-9248-2f35-4f46cee74e0d" [ 804.445154] env[61570]: _type = "Task" [ 804.445154] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.461020] env[61570]: DEBUG oslo_vmware.api [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52d0024d-9d56-9248-2f35-4f46cee74e0d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.957493] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.957751] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.958499] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.662019] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e9ef257-88df-4f81-9763-241e70538856 tempest-AttachInterfacesUnderV243Test-1001287889 tempest-AttachInterfacesUnderV243Test-1001287889-project-member] Acquiring lock "c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.662019] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e9ef257-88df-4f81-9763-241e70538856 tempest-AttachInterfacesUnderV243Test-1001287889 tempest-AttachInterfacesUnderV243Test-1001287889-project-member] Lock "c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.277217] env[61570]: DEBUG nova.compute.manager [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Received event network-changed-3a75fb31-58b0-4547-95af-af9a90f7375e {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 807.277406] env[61570]: DEBUG nova.compute.manager [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Refreshing instance network info cache due to event network-changed-3a75fb31-58b0-4547-95af-af9a90f7375e. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 807.277888] env[61570]: DEBUG oslo_concurrency.lockutils [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] Acquiring lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.277888] env[61570]: DEBUG oslo_concurrency.lockutils [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] Acquired lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.278052] env[61570]: DEBUG nova.network.neutron [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Refreshing network info cache for port 3a75fb31-58b0-4547-95af-af9a90f7375e {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 808.350145] env[61570]: DEBUG nova.network.neutron [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Updated VIF entry in instance network info cache for port 3a75fb31-58b0-4547-95af-af9a90f7375e. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 808.350381] env[61570]: DEBUG nova.network.neutron [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Updating instance_info_cache with network_info: [{"id": "3a75fb31-58b0-4547-95af-af9a90f7375e", "address": "fa:16:3e:5e:df:85", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a75fb31-58", "ovs_interfaceid": "3a75fb31-58b0-4547-95af-af9a90f7375e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.367784] env[61570]: DEBUG oslo_concurrency.lockutils [req-8faa0d13-9458-49ed-b6da-cec5e41f1456 req-e1f004ae-d4dc-4634-924e-802f2712bbd4 service nova] Releasing lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.615995] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f5d44e55-fff1-42ea-ba13-7fedd2a9133d tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "0dc0f5b5-03fc-4b7c-9715-52c6746c86f3" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.616562] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f5d44e55-fff1-42ea-ba13-7fedd2a9133d tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "0dc0f5b5-03fc-4b7c-9715-52c6746c86f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.495424] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58511ce7-ca84-4e0b-8b6b-2cd65cfd4483 tempest-ServersNegativeTestJSON-1355265268 tempest-ServersNegativeTestJSON-1355265268-project-member] Acquiring lock "e3183113-8fc2-408a-a77b-2ac28473154a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.495935] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58511ce7-ca84-4e0b-8b6b-2cd65cfd4483 tempest-ServersNegativeTestJSON-1355265268 tempest-ServersNegativeTestJSON-1355265268-project-member] Lock "e3183113-8fc2-408a-a77b-2ac28473154a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.548518] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c8ab77fe-f962-4721-80bb-1b16d24059ef tempest-InstanceActionsTestJSON-892237482 tempest-InstanceActionsTestJSON-892237482-project-member] Acquiring lock "fd9b2275-0d1a-4e49-8e70-93cbc1b3d645" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.549247] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c8ab77fe-f962-4721-80bb-1b16d24059ef tempest-InstanceActionsTestJSON-892237482 tempest-InstanceActionsTestJSON-892237482-project-member] Lock "fd9b2275-0d1a-4e49-8e70-93cbc1b3d645" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.900459] env[61570]: DEBUG oslo_concurrency.lockutils [None req-4c9e7cee-49f6-4d7e-9623-aa7392b6b0ed tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "acc214aa-35e9-4302-89c7-9248bdda70f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.900459] env[61570]: DEBUG oslo_concurrency.lockutils [None req-4c9e7cee-49f6-4d7e-9623-aa7392b6b0ed tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "acc214aa-35e9-4302-89c7-9248bdda70f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.402039] env[61570]: DEBUG 
oslo_concurrency.lockutils [None req-b45199b5-b53d-45af-91a3-57b0146e6186 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Acquiring lock "fc47b67f-b19a-4b9c-a6df-849c2d3c6797" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.402039] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b45199b5-b53d-45af-91a3-57b0146e6186 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Lock "fc47b67f-b19a-4b9c-a6df-849c2d3c6797" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.003665] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5f71f08d-b970-46ff-ad19-cfd36fff56aa tempest-ServerActionsV293TestJSON-832661754 tempest-ServerActionsV293TestJSON-832661754-project-member] Acquiring lock "4e1969dc-292e-4322-be26-de7d11c405fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.003665] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5f71f08d-b970-46ff-ad19-cfd36fff56aa tempest-ServerActionsV293TestJSON-832661754 tempest-ServerActionsV293TestJSON-832661754-project-member] Lock "4e1969dc-292e-4322-be26-de7d11c405fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.018028] env[61570]: WARNING oslo_vmware.rw_handles [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 844.018028] env[61570]: ERROR oslo_vmware.rw_handles [ 844.018028] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 
tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 844.019477] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 844.019720] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Copying Virtual Disk [datastore2] vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/9186431c-829c-42a5-b1ee-5b6b91d206d3/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 844.020019] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49ecd4b3-db41-4c4e-ac3f-410d760338e8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.029224] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 844.029224] env[61570]: value = "task-4891318" [ 844.029224] env[61570]: _type = "Task" [ 844.029224] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.039450] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': task-4891318, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.539894] env[61570]: DEBUG oslo_vmware.exceptions [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 844.540602] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.541303] env[61570]: ERROR nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 844.541303] env[61570]: Faults: ['InvalidArgument'] [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] Traceback (most recent call last): [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] yield resources [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self.driver.spawn(context, instance, image_meta, [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self._vmops.spawn(context, instance, image_meta, injected_files, [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self._fetch_image_if_missing(context, vi) [ 844.541303] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] image_cache(vi, tmp_image_ds_loc) [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] vm_util.copy_virtual_disk( [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] session._wait_for_task(vmdk_copy_task) [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] return self.wait_for_task(task_ref) [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] return evt.wait() [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] result = hub.switch() [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 844.541709] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] return self.greenlet.switch() [ 844.542125] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 844.542125] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self.f(*self.args, **self.kw) [ 844.542125] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 844.542125] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] raise exceptions.translate_fault(task_info.error) [ 844.542125] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 844.542125] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] Faults: ['InvalidArgument'] [ 844.542125] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] [ 844.542633] env[61570]: INFO nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Terminating instance [ 844.544624] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.544885] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.545633] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 
tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 844.545852] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 844.546105] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4475c469-60ff-4d41-9096-31b11d90ac9f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.549118] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f783e2-0952-4a4d-9fc6-42720daefca2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.557423] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 844.558937] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02aab960-737f-44ed-adc4-b7b2439a0792 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.560835] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.561044] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 844.561752] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff96fa30-a8ec-44d9-b027-8fc976b9999a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.567558] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Waiting for the task: (returnval){ [ 844.567558] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f625b2-8b20-76e8-bf3a-4adf6ffe8532" [ 844.567558] env[61570]: _type = "Task" [ 844.567558] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.576644] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f625b2-8b20-76e8-bf3a-4adf6ffe8532, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.632429] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 844.632682] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 844.632832] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Deleting the datastore file [datastore2] 766812ee-e272-4779-b85d-7fdde876e877 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.633123] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a3ac0c7-4516-4a03-830b-dde8df461b2e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.640959] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 844.640959] env[61570]: value = "task-4891320" [ 844.640959] env[61570]: _type = "Task" [ 844.640959] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.650304] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': task-4891320, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.079038] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 845.079318] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Creating directory with path [datastore2] vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.079554] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05d7dea9-95a0-4cc4-b5c0-3f07ca713f0f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.092905] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Created directory with path [datastore2] vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.093148] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Fetch image to [datastore2] vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 845.093339] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 845.094135] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72688e77-caab-4bbd-a6d5-3edb62bb5493 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.101822] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bfa8b7-2925-4ac8-8a32-822f0d689fd2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.112895] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432fed78-0907-4c54-be9e-5ce7fc45eee5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.161808] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94465ca-63a9-4c9f-afc0-4c48f4fd96a0 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.167083] env[61570]: DEBUG oslo_vmware.api [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': task-4891320, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06738} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.168766] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.168961] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 845.169164] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 845.169342] env[61570]: INFO nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 845.171521] env[61570]: DEBUG nova.compute.claims [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 845.171603] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.171788] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.174392] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-73200ec6-a5f7-4d8c-a632-f61ffb924197 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.207130] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 845.241928] env[61570]: DEBUG nova.scheduler.client.report [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Refreshing inventories for resource provider 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 845.259479] env[61570]: DEBUG nova.scheduler.client.report [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Updating ProviderTree inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 845.259601] env[61570]: DEBUG nova.compute.provider_tree [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 845.277153] env[61570]: DEBUG nova.scheduler.client.report [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Refreshing aggregate associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, aggregates: None {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 845.303346] env[61570]: DEBUG nova.scheduler.client.report [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Refreshing trait associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 845.327780] env[61570]: DEBUG oslo_vmware.rw_handles [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 845.397546] env[61570]: DEBUG oslo_vmware.rw_handles [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 845.397749] env[61570]: DEBUG oslo_vmware.rw_handles [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 845.692707] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95d46a3-1838-4c74-b90a-481cba518e9e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.701185] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7121f3-3309-4238-98ba-679a2c88b7e7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.732511] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8518ea-6113-4196-845e-50144bd93578 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.741064] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6df173-2d32-4f7b-8ea3-0a400213cb18 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.755229] env[61570]: DEBUG nova.compute.provider_tree [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.764026] env[61570]: DEBUG nova.scheduler.client.report [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 845.780729] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.609s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.781277] env[61570]: ERROR nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 845.781277] env[61570]: Faults: ['InvalidArgument'] [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] Traceback (most recent call last): [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 845.781277] 
env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self.driver.spawn(context, instance, image_meta, [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self._vmops.spawn(context, instance, image_meta, injected_files, [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self._fetch_image_if_missing(context, vi) [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] image_cache(vi, tmp_image_ds_loc) [ 845.781277] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] vm_util.copy_virtual_disk( [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] session._wait_for_task(vmdk_copy_task) [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] return self.wait_for_task(task_ref) [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] return evt.wait() [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] result = hub.switch() [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] return self.greenlet.switch() [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 845.781715] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] self.f(*self.args, **self.kw) [ 845.782160] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 845.782160] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] raise exceptions.translate_fault(task_info.error) [ 845.782160] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 845.782160] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] Faults: ['InvalidArgument'] [ 845.782160] env[61570]: ERROR nova.compute.manager [instance: 766812ee-e272-4779-b85d-7fdde876e877] [ 845.782160] env[61570]: DEBUG nova.compute.utils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 845.783642] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Build of instance 766812ee-e272-4779-b85d-7fdde876e877 was re-scheduled: A specified parameter was not correct: fileType [ 845.783642] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 845.784084] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 845.784271] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 845.784443] env[61570]: DEBUG nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 845.784630] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 846.272311] env[61570]: DEBUG nova.network.neutron [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.290513] env[61570]: INFO nova.compute.manager [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Took 0.51 seconds to deallocate network for instance. [ 846.435328] env[61570]: INFO nova.scheduler.client.report [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Deleted allocations for instance 766812ee-e272-4779-b85d-7fdde876e877 [ 846.459269] env[61570]: DEBUG oslo_concurrency.lockutils [None req-86ac1497-cce1-4702-ab9f-f5f861193394 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "766812ee-e272-4779-b85d-7fdde876e877" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 299.521s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.463083] env[61570]: DEBUG oslo_concurrency.lockutils [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "766812ee-e272-4779-b85d-7fdde876e877" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 100.067s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.463083] env[61570]: DEBUG oslo_concurrency.lockutils [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "766812ee-e272-4779-b85d-7fdde876e877-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.463083] env[61570]: DEBUG oslo_concurrency.lockutils [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "766812ee-e272-4779-b85d-7fdde876e877-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.463278] env[61570]: DEBUG oslo_concurrency.lockutils [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "766812ee-e272-4779-b85d-7fdde876e877-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.463278] env[61570]: INFO nova.compute.manager [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Terminating instance [ 846.464887] env[61570]: DEBUG nova.compute.manager [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 846.465095] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 846.465880] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5a5f48d-36fc-4ba4-a609-4a6e4dd9fcec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.476200] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1a2414-1c51-4fee-8796-ab5c898beec6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.486621] env[61570]: DEBUG nova.compute.manager [None req-d5630fa8-d955-41e3-87d0-d690fe6f2afc tempest-ImagesOneServerNegativeTestJSON-2032486270 tempest-ImagesOneServerNegativeTestJSON-2032486270-project-member] [instance: 966ce8b7-1a10-46f7-b113-e191cdc9a6ea] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 846.517242] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 766812ee-e272-4779-b85d-7fdde876e877 could not be found. 
[ 846.517476] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 846.517693] env[61570]: INFO nova.compute.manager [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Took 0.05 seconds to destroy the instance on the hypervisor. [ 846.517997] env[61570]: DEBUG oslo.service.loopingcall [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.518344] env[61570]: DEBUG nova.compute.manager [-] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 846.518468] env[61570]: DEBUG nova.network.neutron [-] [instance: 766812ee-e272-4779-b85d-7fdde876e877] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 846.523559] env[61570]: DEBUG nova.compute.manager [None req-d5630fa8-d955-41e3-87d0-d690fe6f2afc tempest-ImagesOneServerNegativeTestJSON-2032486270 tempest-ImagesOneServerNegativeTestJSON-2032486270-project-member] [instance: 966ce8b7-1a10-46f7-b113-e191cdc9a6ea] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 846.550318] env[61570]: DEBUG nova.network.neutron [-] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.555639] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d5630fa8-d955-41e3-87d0-d690fe6f2afc tempest-ImagesOneServerNegativeTestJSON-2032486270 tempest-ImagesOneServerNegativeTestJSON-2032486270-project-member] Lock "966ce8b7-1a10-46f7-b113-e191cdc9a6ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 244.012s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.558836] env[61570]: INFO nova.compute.manager [-] [instance: 766812ee-e272-4779-b85d-7fdde876e877] Took 0.04 seconds to deallocate network for instance. [ 846.566528] env[61570]: DEBUG nova.compute.manager [None req-2d949050-c65c-47e3-a7e4-9ed039be8712 tempest-ServerAddressesNegativeTestJSON-289184649 tempest-ServerAddressesNegativeTestJSON-289184649-project-member] [instance: 82b9c976-583a-46f1-b412-87dd225dba12] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 846.592680] env[61570]: DEBUG nova.compute.manager [None req-2d949050-c65c-47e3-a7e4-9ed039be8712 tempest-ServerAddressesNegativeTestJSON-289184649 tempest-ServerAddressesNegativeTestJSON-289184649-project-member] [instance: 82b9c976-583a-46f1-b412-87dd225dba12] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 846.630559] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2d949050-c65c-47e3-a7e4-9ed039be8712 tempest-ServerAddressesNegativeTestJSON-289184649 tempest-ServerAddressesNegativeTestJSON-289184649-project-member] Lock "82b9c976-583a-46f1-b412-87dd225dba12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.806s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.642369] env[61570]: DEBUG nova.compute.manager [None req-a4f923b0-7957-4d7f-a4b2-819fa77dbac5 tempest-ServerRescueTestJSON-316296412 tempest-ServerRescueTestJSON-316296412-project-member] [instance: fb6a424d-c4b0-4913-a4ef-aa361ff25101] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 846.685846] env[61570]: DEBUG nova.compute.manager [None req-a4f923b0-7957-4d7f-a4b2-819fa77dbac5 tempest-ServerRescueTestJSON-316296412 tempest-ServerRescueTestJSON-316296412-project-member] [instance: fb6a424d-c4b0-4913-a4ef-aa361ff25101] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 846.702852] env[61570]: DEBUG oslo_concurrency.lockutils [None req-eee48869-0ea8-4333-b800-306e17145a56 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "766812ee-e272-4779-b85d-7fdde876e877" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.242s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.708204] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a4f923b0-7957-4d7f-a4b2-819fa77dbac5 tempest-ServerRescueTestJSON-316296412 tempest-ServerRescueTestJSON-316296412-project-member] Lock "fb6a424d-c4b0-4913-a4ef-aa361ff25101" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.487s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.719141] env[61570]: DEBUG nova.compute.manager [None req-6a9413ab-bb7b-40ed-86d3-479f5af3d24b tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 497d6954-60e5-4a83-932c-c95de38b6f7b] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 846.748438] env[61570]: DEBUG nova.compute.manager [None req-6a9413ab-bb7b-40ed-86d3-479f5af3d24b tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 497d6954-60e5-4a83-932c-c95de38b6f7b] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 846.773858] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6a9413ab-bb7b-40ed-86d3-479f5af3d24b tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "497d6954-60e5-4a83-932c-c95de38b6f7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.736s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.786232] env[61570]: DEBUG nova.compute.manager [None req-a754c5e3-59a8-422c-b60b-265993097e13 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] [instance: abf366b6-380a-4108-b351-8a2f2cceb018] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 846.817337] env[61570]: DEBUG nova.compute.manager [None req-a754c5e3-59a8-422c-b60b-265993097e13 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] [instance: abf366b6-380a-4108-b351-8a2f2cceb018] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 846.842623] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a754c5e3-59a8-422c-b60b-265993097e13 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Lock "abf366b6-380a-4108-b351-8a2f2cceb018" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.571s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.859221] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 846.912271] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.912494] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.914159] env[61570]: INFO nova.compute.claims [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.291141] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257052b4-4f6f-42f0-9dfc-ea818a4e4a12 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.300345] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f134c75a-bbde-4e48-b7a4-813bc83106c0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.338164] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80997c9e-428a-4eff-9c34-5b150e0b3910 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.346748] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd35382f-200d-4182-94bd-45c65b5aa23e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.361824] env[61570]: DEBUG nova.compute.provider_tree [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.373606] env[61570]: DEBUG nova.scheduler.client.report [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 847.389789] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.477s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.390416] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 847.437528] env[61570]: DEBUG nova.compute.utils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 847.438975] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 847.439164] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 847.453110] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 847.524612] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 847.542899] env[61570]: DEBUG nova.policy [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '625e0cedad5f4154a435c1ff36e4c480', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb2aa2a9c3af4e059ab13f940dbf497a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 847.552886] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.552886] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.552886] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.553129] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.553178] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.553294] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.553505] env[61570]: DEBUG 
nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.553662] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 847.553825] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.553986] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.554174] env[61570]: DEBUG nova.virt.hardware [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.555063] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373ddc30-ea67-423c-84b6-9b6e490a738f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.564807] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeab771f-ee32-485a-a1c0-c6c64b53258c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.745759] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.745759] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.067496] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Successfully created port: 
1fc3d7cc-66d1-40f3-8bec-5fc55ccea395 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.014353] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Successfully updated port: 1fc3d7cc-66d1-40f3-8bec-5fc55ccea395 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 849.026853] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "refresh_cache-e4f4573c-040a-49d6-ba20-e051a265b3e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.027015] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquired lock "refresh_cache-e4f4573c-040a-49d6-ba20-e051a265b3e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.027183] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 849.070723] env[61570]: DEBUG nova.compute.manager [req-7149f59c-ed9c-4097-9e06-9c2252fe0d27 req-a2a433cc-3e36-44bd-a255-cf3ba565e602 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Received event network-vif-plugged-1fc3d7cc-66d1-40f3-8bec-5fc55ccea395 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 849.070930] env[61570]: DEBUG oslo_concurrency.lockutils [req-7149f59c-ed9c-4097-9e06-9c2252fe0d27 req-a2a433cc-3e36-44bd-a255-cf3ba565e602 service nova] Acquiring lock "e4f4573c-040a-49d6-ba20-e051a265b3e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.071145] env[61570]: DEBUG oslo_concurrency.lockutils [req-7149f59c-ed9c-4097-9e06-9c2252fe0d27 req-a2a433cc-3e36-44bd-a255-cf3ba565e602 service nova] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.071307] env[61570]: DEBUG oslo_concurrency.lockutils [req-7149f59c-ed9c-4097-9e06-9c2252fe0d27 req-a2a433cc-3e36-44bd-a255-cf3ba565e602 service nova] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.071498] env[61570]: DEBUG nova.compute.manager [req-7149f59c-ed9c-4097-9e06-9c2252fe0d27 req-a2a433cc-3e36-44bd-a255-cf3ba565e602 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] No waiting events found dispatching network-vif-plugged-1fc3d7cc-66d1-40f3-8bec-5fc55ccea395 {{(pid=61570) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 849.071618] env[61570]: WARNING nova.compute.manager [req-7149f59c-ed9c-4097-9e06-9c2252fe0d27 req-a2a433cc-3e36-44bd-a255-cf3ba565e602 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Received unexpected event network-vif-plugged-1fc3d7cc-66d1-40f3-8bec-5fc55ccea395 for instance with vm_state building and task_state spawning. [ 849.087684] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.556440] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Updating instance_info_cache with network_info: [{"id": "1fc3d7cc-66d1-40f3-8bec-5fc55ccea395", "address": "fa:16:3e:31:2d:2c", "network": {"id": "3c873b9f-5fa1-4a8f-931e-f8de7f973bb8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1852175208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb2aa2a9c3af4e059ab13f940dbf497a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fc3d7cc-66", "ovs_interfaceid": "1fc3d7cc-66d1-40f3-8bec-5fc55ccea395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.573611] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Releasing lock "refresh_cache-e4f4573c-040a-49d6-ba20-e051a265b3e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.573611] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Instance network_info: |[{"id": "1fc3d7cc-66d1-40f3-8bec-5fc55ccea395", "address": "fa:16:3e:31:2d:2c", "network": {"id": "3c873b9f-5fa1-4a8f-931e-f8de7f973bb8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1852175208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "bb2aa2a9c3af4e059ab13f940dbf497a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fc3d7cc-66", "ovs_interfaceid": "1fc3d7cc-66d1-40f3-8bec-5fc55ccea395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 849.573782] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:2d:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fc3d7cc-66d1-40f3-8bec-5fc55ccea395', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.581254] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Creating folder: Project (bb2aa2a9c3af4e059ab13f940dbf497a). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.581923] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39f8698a-c1e7-4a2e-90d5-f6fb6525a22b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.592956] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Created folder: Project (bb2aa2a9c3af4e059ab13f940dbf497a) in parent group-v953072. [ 849.593542] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Creating folder: Instances. Parent ref: group-v953124. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.593542] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50c3f74d-e151-4d5d-a6ec-e0f0d9459c40 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.603576] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Created folder: Instances in parent group-v953124. [ 849.603576] env[61570]: DEBUG oslo.service.loopingcall [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.603730] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 849.603820] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78779d72-c9e1-4ce4-92c6-2d9b5516fd96 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.624068] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.624068] env[61570]: value = "task-4891323" [ 849.624068] env[61570]: _type = "Task" [ 849.624068] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.632615] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891323, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.134383] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891323, 'name': CreateVM_Task, 'duration_secs': 0.319766} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.134695] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 850.135234] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.135399] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.135715] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 850.135981] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3554bf0-b588-4e0e-9d3c-9dae591f5a4f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.141380] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Waiting for the task: (returnval){ [ 850.141380] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52de7433-50e2-6a61-d9fa-7d478fdebf88" [ 850.141380] env[61570]: _type = "Task" [ 850.141380] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.151194] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52de7433-50e2-6a61-d9fa-7d478fdebf88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.652576] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.652576] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.652824] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.097205] env[61570]: DEBUG nova.compute.manager [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Received event network-changed-1fc3d7cc-66d1-40f3-8bec-5fc55ccea395 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 851.097582] env[61570]: DEBUG nova.compute.manager [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Refreshing instance network info cache due to event network-changed-1fc3d7cc-66d1-40f3-8bec-5fc55ccea395. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 851.098736] env[61570]: DEBUG oslo_concurrency.lockutils [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] Acquiring lock "refresh_cache-e4f4573c-040a-49d6-ba20-e051a265b3e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.098736] env[61570]: DEBUG oslo_concurrency.lockutils [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] Acquired lock "refresh_cache-e4f4573c-040a-49d6-ba20-e051a265b3e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.098736] env[61570]: DEBUG nova.network.neutron [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Refreshing network info cache for port 1fc3d7cc-66d1-40f3-8bec-5fc55ccea395 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 851.525657] env[61570]: DEBUG nova.network.neutron [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Updated VIF entry in instance network info cache for port 1fc3d7cc-66d1-40f3-8bec-5fc55ccea395. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 851.526025] env[61570]: DEBUG nova.network.neutron [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Updating instance_info_cache with network_info: [{"id": "1fc3d7cc-66d1-40f3-8bec-5fc55ccea395", "address": "fa:16:3e:31:2d:2c", "network": {"id": "3c873b9f-5fa1-4a8f-931e-f8de7f973bb8", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1852175208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb2aa2a9c3af4e059ab13f940dbf497a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fc3d7cc-66", "ovs_interfaceid": "1fc3d7cc-66d1-40f3-8bec-5fc55ccea395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.535909] env[61570]: DEBUG oslo_concurrency.lockutils [req-849446d0-27a3-4ef5-b637-85a952dc9e6f req-6aa027b0-313c-4103-8879-056467ab3811 service nova] Releasing lock "refresh_cache-e4f4573c-040a-49d6-ba20-e051a265b3e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.754029] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.754029] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 855.754402] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 855.776595] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.776834] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.776931] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.776996] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.777134] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.777247] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.777367] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.777485] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.777600] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.777716] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 855.777836] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 858.752323] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.752636] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.752750] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 858.752935] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.764752] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.764983] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.765176] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.765333] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 858.766721] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d278523-66bc-4560-847b-a574a2fb07f2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.777497] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2ba4e8ae-b383-4c30-952f-0354bcc3cd13 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.793118] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c09cd3b-d71c-433d-9c50-33a3e549d2e2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.800695] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9eb54d-870a-4587-ad7c-e1fb177fdde5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.830189] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180604MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 858.830389] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.830558] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.915578] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.915751] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.915882] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.916013] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.917022] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.917022] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.917022] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.917022] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.917181] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.917181] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 858.931478] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 29078f8b-7d23-4d10-ab27-88c49ac7fa97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 858.941485] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 858.954272] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 858.967556] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 43599711-7de0-465c-a8ab-fc24d90ed9c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 858.978870] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3914246-a7b8-46d3-b8c1-3c7254a30693 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 858.990266] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 94e9909d-31a5-4d2c-a12b-aaebb32d4445 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.001258] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.011536] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.022424] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0dc0f5b5-03fc-4b7c-9715-52c6746c86f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.032703] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e3183113-8fc2-408a-a77b-2ac28473154a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.043602] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fd9b2275-0d1a-4e49-8e70-93cbc1b3d645 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.057259] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance acc214aa-35e9-4302-89c7-9248bdda70f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.068912] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fc47b67f-b19a-4b9c-a6df-849c2d3c6797 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.080116] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 4e1969dc-292e-4322-be26-de7d11c405fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.091617] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 859.091864] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 859.092046] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '22', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_a90ae0f6cf7e45b9a7408bdd6317387e': '1', 'io_workload': '10', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_5beaa7046a57489b8b0ca03a79344d08': '1', 'num_proj_779314e2630246b98c8b6a11c3f71890': '1', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1', 'num_task_spawning': '2', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '2', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1', 'num_proj_5922737c204d481fb40713877b5f46f5': '1', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 859.417742] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4961554-75bb-4fde-b906-7dfcbfe6e17d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.426239] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e2ef45-0bf6-4f80-9539-a2f5fda3ea48 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.457554] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc96461-1669-4bc3-8ec1-d95c3ccfa8b3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.467149] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9977e3b6-b9e5-4480-b821-80307a29e1cb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.480984] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.489948] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.504881] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f 
None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 859.505090] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.675s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.505254] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.505614] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.753233] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.594684] env[61570]: DEBUG oslo_concurrency.lockutils [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.748507] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.752133] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.748778] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.559887] env[61570]: WARNING oslo_vmware.rw_handles [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in 
getresponse [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 894.559887] env[61570]: ERROR oslo_vmware.rw_handles [ 894.560701] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 894.562361] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 894.562756] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Copying Virtual Disk [datastore2] vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/82120207-5ae4-4798-9ec5-820650b3423c/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 894.563110] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81a9c7c6-8f5f-4d97-86d4-7ba3571daa97 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.572377] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Waiting for the task: (returnval){ [ 894.572377] env[61570]: value = "task-4891324" [ 894.572377] env[61570]: _type = "Task" [ 894.572377] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.580891] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Task: {'id': task-4891324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.085157] env[61570]: DEBUG oslo_vmware.exceptions [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 895.085157] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.085157] env[61570]: ERROR nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 895.085157] env[61570]: Faults: ['InvalidArgument'] [ 895.085157] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Traceback (most recent call last): [ 895.085157] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 895.085157] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] yield resources [ 895.085157] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 895.085157] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] self.driver.spawn(context, instance, image_meta, [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] self._fetch_image_if_missing(context, vi) [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] image_cache(vi, tmp_image_ds_loc) [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] vm_util.copy_virtual_disk( [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] session._wait_for_task(vmdk_copy_task) [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] return self.wait_for_task(task_ref) [ 895.085475] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] return evt.wait() [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] result = hub.switch() [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] return self.greenlet.switch() [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] self.f(*self.args, **self.kw) [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] raise exceptions.translate_fault(task_info.error) [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Faults: ['InvalidArgument'] [ 895.086093] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] [ 895.086488] env[61570]: INFO nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Terminating instance [ 895.087066] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.087283] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.088094] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8efc55c-9afe-43f1-bfd4-e0ac088013a6 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.089832] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 895.090047] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 895.090793] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e12b9f-5e16-473d-825c-2754e26f3735 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.098679] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 895.098952] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a05c998-7062-4ca4-91ad-a6ae67a12345 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.101337] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.101504] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 895.102545] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecf6c152-13ec-4e2a-8d99-2fc042c2f655 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.107790] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Waiting for the task: (returnval){ [ 895.107790] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5233af0a-0253-f466-780a-6c93a90ef49a" [ 895.107790] env[61570]: _type = "Task" [ 895.107790] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.115873] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5233af0a-0253-f466-780a-6c93a90ef49a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.171864] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 895.172241] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 895.172361] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Deleting the datastore file [datastore2] 25e22032-2ee7-44df-ae6a-022b5bda9f2c {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.172665] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ddc40dc-bd9e-4afc-8a08-ad95e735bfe4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.180705] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Waiting for the task: (returnval){ [ 895.180705] env[61570]: value = "task-4891326" [ 895.180705] env[61570]: _type = "Task" [ 895.180705] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.189486] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Task: {'id': task-4891326, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.618959] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 895.619315] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Creating directory with path [datastore2] vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.619558] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-016ebf98-e189-4e9d-ba6d-a33d26db22d9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.633315] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Created directory with path [datastore2] vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.633981] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Fetch image to [datastore2] vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 895.633981] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 895.634632] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d454193-41c0-4e8a-ab5a-c31d5c4444bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.642704] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69da1271-198a-4f34-be11-70255b01e20a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.654561] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30873411-fa23-4af0-8c89-bc3eb668d6ec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.690201] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-55e539b3-16e3-4840-9ca9-d8d512c6d5b1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.699896] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3fde967c-0a52-4e04-91ac-5364ca2cf70a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.700972] env[61570]: DEBUG oslo_vmware.api [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Task: {'id': task-4891326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088426} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.701239] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.701421] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 895.701591] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 895.701764] env[61570]: INFO nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Took 0.61 seconds to destroy the instance on the hypervisor. 
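[log note] The entries above show the driver submitting FileManager.DeleteDatastoreFile_Task (task-4891326), polling it ("progress is 0%", then "completed successfully" with duration_secs 0.088), and only then reporting "Took 0.61 seconds to destroy the instance". A minimal sketch of that poll-until-terminal-state pattern is below; `poll_task`, `TaskInfo`, the fake backend, and the 0.5 s interval are illustrative assumptions, not the oslo.vmware implementation.

```python
# Illustrative sketch of the poll-until-complete pattern seen above for
# DeleteDatastoreFile_Task (task-4891324 / task-4891326). All names and the
# fake backend are assumptions for demonstration, not oslo.vmware code.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str          # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error: str | None = None


def poll_task(get_task_info, interval=0.5, timeout=60.0):
    """Poll a task until it reaches a terminal state, then return or raise."""
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Mirrors the log's "raise exceptions.translate_fault(...)" step.
            raise RuntimeError(info.error or 'task failed')
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in time')
        time.sleep(interval)


# Fake backend standing in for the vCenter task: two polls, then success.
_states = iter([TaskInfo('running', 0), TaskInfo('running', 50),
                TaskInfo('success', 100)])
result = poll_task(lambda: next(_states), interval=0.01)
print(result.state, result.progress)   # -> success 100
```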
[ 895.704053] env[61570]: DEBUG nova.compute.claims [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 895.704303] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.704445] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.725517] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 895.784520] env[61570]: DEBUG oslo_vmware.rw_handles [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 895.844998] env[61570]: DEBUG oslo_vmware.rw_handles [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 895.845330] env[61570]: DEBUG oslo_vmware.rw_handles [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 896.135584] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9a0f4d-3cf1-449e-bcd2-f03de4def3ac {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.143830] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0dc205-08d3-43f8-8f33-2d167e7726f2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.175029] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f06f55-e896-461a-8594-930ddb919547 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.182796] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba52b24a-3862-464e-b001-b64ee7947b83 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.196611] env[61570]: DEBUG nova.compute.provider_tree [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.208061] env[61570]: DEBUG nova.scheduler.client.report [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.223231] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.518s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.223557] env[61570]: ERROR nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 896.223557] env[61570]: Faults: ['InvalidArgument'] [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Traceback (most recent call last): [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 
25e22032-2ee7-44df-ae6a-022b5bda9f2c] self.driver.spawn(context, instance, image_meta, [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] self._fetch_image_if_missing(context, vi) [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] image_cache(vi, tmp_image_ds_loc) [ 896.223557] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] vm_util.copy_virtual_disk( [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] session._wait_for_task(vmdk_copy_task) [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] return self.wait_for_task(task_ref) [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] return evt.wait() [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] result = hub.switch() [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] return self.greenlet.switch() [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 896.224015] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] self.f(*self.args, **self.kw) [ 896.224448] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 896.224448] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] raise exceptions.translate_fault(task_info.error) [ 896.224448] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 896.224448] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Faults: ['InvalidArgument'] [ 896.224448] env[61570]: ERROR nova.compute.manager [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] [ 896.224448] env[61570]: DEBUG nova.compute.utils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 896.225799] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Build of instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c was re-scheduled: A specified parameter was not correct: fileType [ 896.225799] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 896.226192] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 896.226364] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 896.226517] env[61570]: DEBUG nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 896.226673] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.612375] env[61570]: DEBUG nova.network.neutron [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.625669] env[61570]: INFO nova.compute.manager [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Took 0.40 seconds to deallocate network for instance. [ 896.733121] env[61570]: INFO nova.scheduler.client.report [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Deleted allocations for instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c [ 896.764088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-836e6d86-6519-44e0-b511-18aed8fd8caa tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 344.173s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.766787] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 143.591s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.766919] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Acquiring lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.767228] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.767408] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.772019] env[61570]: INFO nova.compute.manager [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Terminating instance [ 896.773298] env[61570]: DEBUG nova.compute.manager [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 896.773674] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 896.774102] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87d74ef4-cde5-4b57-acdc-d4f9e1714bb1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.788263] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2362c6-0124-462d-984d-9d4a864ac0b5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.801773] env[61570]: DEBUG nova.compute.manager [None req-7e103740-734b-4ea4-bd54-4d40a1d21b92 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 29078f8b-7d23-4d10-ab27-88c49ac7fa97] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 896.834424] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25e22032-2ee7-44df-ae6a-022b5bda9f2c could not be found. [ 896.834424] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 896.834424] env[61570]: INFO nova.compute.manager [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Took 0.06 seconds to destroy the instance on the hypervisor. 
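[log note] In the terminate path above, the VM is already gone on the backend (nova.exception.InstanceNotFound), the driver logs a warning, and the instance is still reported destroyed so that network deallocation and allocation cleanup can continue. A small sketch of that tolerant-destroy idea follows; `NotFoundError`, `backend_delete`, and `destroy_vm` are assumed names for illustration, not Nova's actual code.

```python
# Sketch of the tolerant destroy seen above: a VM that no longer exists on the
# backend is treated as already destroyed so higher-level cleanup can proceed.
# All names here (NotFoundError, backend_delete, destroy_vm) are illustrative.
import logging

LOG = logging.getLogger(__name__)


class NotFoundError(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_vm(instance_id, backend_delete):
    """Delete the VM; if it is already gone, warn and carry on."""
    try:
        backend_delete(instance_id)
    except NotFoundError:
        LOG.warning("Instance does not exist on backend: %s", instance_id)
    # Either way the instance is gone from the hypervisor's point of view,
    # so the caller can continue with network/allocation cleanup.
    return True


# Example: the backend raises because the VM was already unregistered.
def _backend_delete(instance_id):
    raise NotFoundError(instance_id)


assert destroy_vm("25e22032-2ee7-44df-ae6a-022b5bda9f2c", _backend_delete)
```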
[ 896.834843] env[61570]: DEBUG oslo.service.loopingcall [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.835162] env[61570]: DEBUG nova.compute.manager [None req-7e103740-734b-4ea4-bd54-4d40a1d21b92 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 29078f8b-7d23-4d10-ab27-88c49ac7fa97] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 896.836388] env[61570]: DEBUG nova.compute.manager [-] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 896.836493] env[61570]: DEBUG nova.network.neutron [-] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.865140] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7e103740-734b-4ea4-bd54-4d40a1d21b92 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "29078f8b-7d23-4d10-ab27-88c49ac7fa97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.947s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.866813] env[61570]: DEBUG nova.network.neutron [-] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.876127] env[61570]: INFO nova.compute.manager [-] [instance: 25e22032-2ee7-44df-ae6a-022b5bda9f2c] Took 0.04 seconds to deallocate network for instance. [ 896.876643] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 896.941514] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.941631] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.943193] env[61570]: INFO nova.compute.claims [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.012292] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ae15e098-7ad6-4c38-a877-47a8ad87fe99 tempest-TenantUsagesTestJSON-907446386 tempest-TenantUsagesTestJSON-907446386-project-member] Lock "25e22032-2ee7-44df-ae6a-022b5bda9f2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.246s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.336400] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.337188] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.390053] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0161f2b6-3c91-4108-bd53-5f07b6a663d1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.401224] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568f2d5d-aa71-4299-a328-b96066cbce74 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.435027] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33473873-2e98-4c8d-9b0e-5b96cbb22212 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.443650] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2bd472-caeb-4b7d-97ee-7e57207dc153 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.458147] env[61570]: DEBUG nova.compute.provider_tree [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.472041] env[61570]: DEBUG nova.scheduler.client.report [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 897.491410] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.550s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.492029] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 897.531873] env[61570]: DEBUG nova.compute.utils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.533307] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 897.533438] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 897.546618] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Start building block device mappings for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 897.634691] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 897.667867] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 897.668139] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 897.668303] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.668488] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 897.668632] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.668778] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 897.669282] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 897.669339] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 897.669939] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 897.670180] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 897.670368] env[61570]: DEBUG nova.virt.hardware [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 897.671624] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbd5d20-a9f6-45fc-8647-7498497f9384 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.676137] env[61570]: DEBUG nova.policy [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa1b03bd9a8c4d68b21125c9ea3c871e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ad35672443f4c9d97f0240cadfb986d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 897.684758] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33dcd1f-84fe-4520-b5ff-3393a1a9ff38 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.291092] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Successfully created port: 2ee0faef-9df8-4b2b-8966-dd9491219b3c {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 899.028340] env[61570]: DEBUG nova.compute.manager [req-eca0c803-9842-4afe-8d10-a95146a4298e req-e5c4b329-38e7-4e5f-bb78-73d2dd4a1404 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Received event network-vif-plugged-2ee0faef-9df8-4b2b-8966-dd9491219b3c {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 899.028605] env[61570]: DEBUG oslo_concurrency.lockutils [req-eca0c803-9842-4afe-8d10-a95146a4298e 
req-e5c4b329-38e7-4e5f-bb78-73d2dd4a1404 service nova] Acquiring lock "df50c085-3eee-44c2-8d14-263f3bf49b2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.028796] env[61570]: DEBUG oslo_concurrency.lockutils [req-eca0c803-9842-4afe-8d10-a95146a4298e req-e5c4b329-38e7-4e5f-bb78-73d2dd4a1404 service nova] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.028796] env[61570]: DEBUG oslo_concurrency.lockutils [req-eca0c803-9842-4afe-8d10-a95146a4298e req-e5c4b329-38e7-4e5f-bb78-73d2dd4a1404 service nova] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.028970] env[61570]: DEBUG nova.compute.manager [req-eca0c803-9842-4afe-8d10-a95146a4298e req-e5c4b329-38e7-4e5f-bb78-73d2dd4a1404 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] No waiting events found dispatching network-vif-plugged-2ee0faef-9df8-4b2b-8966-dd9491219b3c {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 899.029131] env[61570]: WARNING nova.compute.manager [req-eca0c803-9842-4afe-8d10-a95146a4298e req-e5c4b329-38e7-4e5f-bb78-73d2dd4a1404 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Received unexpected event network-vif-plugged-2ee0faef-9df8-4b2b-8966-dd9491219b3c for instance with vm_state building and task_state spawning. 
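These lines trace Neutron's external-event callback: the network-vif-plugged event for port 2ee0faef-... arrives, the manager tries to pop a matching waiter under the per-instance "-events" lock, finds none (the spawn has not registered one yet), and downgrades the event to a warning. A rough sketch of that register/pop pattern follows, using a plain dict and threading primitives rather than Nova's InstanceEvents class; the names and structure are illustrative only.

    import threading

    class EventWaiters:
        """Map (instance_uuid, event_name) -> waiter, guarded by one lock."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}

        def prepare(self, instance_uuid, event_name):
            # Called by the code path that expects the event, e.g. before plugging a VIF.
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop(self, instance_uuid, event_name):
            # Called when the external event actually arrives.
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)


    events = EventWaiters()
    waiter = events.pop("df50c085-3eee-44c2-8d14-263f3bf49b2d",
                        "network-vif-plugged-2ee0faef-9df8-4b2b-8966-dd9491219b3c")
    if waiter is None:
        # Nothing was waiting for this event yet, as in the log above.
        print("WARNING: received unexpected event, no waiter registered")
    else:
        waiter.set()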
[ 899.109836] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Successfully updated port: 2ee0faef-9df8-4b2b-8966-dd9491219b3c {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.126572] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "refresh_cache-df50c085-3eee-44c2-8d14-263f3bf49b2d" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.126766] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired lock "refresh_cache-df50c085-3eee-44c2-8d14-263f3bf49b2d" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.126922] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 899.173793] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 899.421777] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Updating instance_info_cache with network_info: [{"id": "2ee0faef-9df8-4b2b-8966-dd9491219b3c", "address": "fa:16:3e:e5:d1:6a", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee0faef-9d", "ovs_interfaceid": "2ee0faef-9df8-4b2b-8966-dd9491219b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.437415] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Releasing lock "refresh_cache-df50c085-3eee-44c2-8d14-263f3bf49b2d" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.437746] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Instance network_info: |[{"id": "2ee0faef-9df8-4b2b-8966-dd9491219b3c", "address": "fa:16:3e:e5:d1:6a", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee0faef-9d", "ovs_interfaceid": "2ee0faef-9df8-4b2b-8966-dd9491219b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.438300] env[61570]: DEBUG 
nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:d1:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ee0faef-9df8-4b2b-8966-dd9491219b3c', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.446221] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating folder: Project (0ad35672443f4c9d97f0240cadfb986d). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 899.447312] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e11ed2dd-4b74-4c65-bf41-49c9755e52bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.460875] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Created folder: Project (0ad35672443f4c9d97f0240cadfb986d) in parent group-v953072. [ 899.460875] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating folder: Instances. Parent ref: group-v953127. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 899.461420] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c34adbb-718e-4277-a52b-ad25713c647f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.472645] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Created folder: Instances in parent group-v953127. [ 899.473509] env[61570]: DEBUG oslo.service.loopingcall [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 899.473509] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 899.473509] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-377df24d-1d52-4ab9-b757-f1bd8f438410 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.493984] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.493984] env[61570]: value = "task-4891329" [ 899.493984] env[61570]: _type = "Task" [ 899.493984] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.502324] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891329, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.008020] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891329, 'name': CreateVM_Task, 'duration_secs': 0.312569} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.008020] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 900.008020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.008020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.008020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 900.008265] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e65d569a-3506-4c5a-898c-a7acbdadb64d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.012116] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 900.012116] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5219ab34-bbfb-09b6-ba07-5e283a687583" [ 900.012116] env[61570]: _type = "Task" [ 900.012116] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.022123] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5219ab34-bbfb-09b6-ba07-5e283a687583, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.523806] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.523806] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.523806] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.161143] env[61570]: DEBUG nova.compute.manager [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Received event network-changed-2ee0faef-9df8-4b2b-8966-dd9491219b3c {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 901.161143] env[61570]: DEBUG nova.compute.manager [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Refreshing instance network info cache due to event network-changed-2ee0faef-9df8-4b2b-8966-dd9491219b3c. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 901.161323] env[61570]: DEBUG oslo_concurrency.lockutils [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] Acquiring lock "refresh_cache-df50c085-3eee-44c2-8d14-263f3bf49b2d" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.161464] env[61570]: DEBUG oslo_concurrency.lockutils [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] Acquired lock "refresh_cache-df50c085-3eee-44c2-8d14-263f3bf49b2d" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.161627] env[61570]: DEBUG nova.network.neutron [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Refreshing network info cache for port 2ee0faef-9df8-4b2b-8966-dd9491219b3c {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 901.571288] env[61570]: DEBUG nova.network.neutron [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Updated VIF entry in instance network info cache for port 2ee0faef-9df8-4b2b-8966-dd9491219b3c. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 901.571643] env[61570]: DEBUG nova.network.neutron [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Updating instance_info_cache with network_info: [{"id": "2ee0faef-9df8-4b2b-8966-dd9491219b3c", "address": "fa:16:3e:e5:d1:6a", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee0faef-9d", "ovs_interfaceid": "2ee0faef-9df8-4b2b-8966-dd9491219b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.583712] env[61570]: DEBUG oslo_concurrency.lockutils [req-a883e36a-6056-497e-a4ba-dbb1ea71f58d req-8e86f89c-ca15-49ce-bb39-14135ff994a1 service nova] Releasing lock "refresh_cache-df50c085-3eee-44c2-8d14-263f3bf49b2d" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.617604] env[61570]: DEBUG oslo_concurrency.lockutils [None req-53ac5f31-d284-40f7-b864-f11fe21d7650 tempest-ServerShowV254Test-616399010 tempest-ServerShowV254Test-616399010-project-member] Acquiring lock "a0e94a85-8b9b-4394-bbaa-cc21786d3d01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.617920] env[61570]: DEBUG oslo_concurrency.lockutils [None req-53ac5f31-d284-40f7-b864-f11fe21d7650 tempest-ServerShowV254Test-616399010 tempest-ServerShowV254Test-616399010-project-member] Lock "a0e94a85-8b9b-4394-bbaa-cc21786d3d01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.755133] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.755133] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 917.755133] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:10011}} [ 917.780124] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.780341] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.780514] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.780679] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.780837] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.780969] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.781110] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.781229] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.781342] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.781456] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 917.781576] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 918.754023] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.754023] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.754023] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 918.754023] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.766704] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.767160] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.767223] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.767414] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 918.768692] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567759b0-97c2-409e-b16b-c0f5626eb166 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.778278] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0095271d-15c1-4916-be98-fa337aa1a3a3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.793302] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d4329a-6a4a-4e2b-afc3-b3cba506ac7a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.800325] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d848508c-d287-4119-8745-adc29dab98b6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.830715] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180571MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 918.830895] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.831105] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.905029] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905029] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905029] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905029] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905258] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905258] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905258] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905258] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905393] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.905393] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 918.920639] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 918.931725] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 43599711-7de0-465c-a8ab-fc24d90ed9c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 918.943765] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3914246-a7b8-46d3-b8c1-3c7254a30693 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 918.953039] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 94e9909d-31a5-4d2c-a12b-aaebb32d4445 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 918.965143] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 918.976666] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 918.987076] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0dc0f5b5-03fc-4b7c-9715-52c6746c86f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 918.996670] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e3183113-8fc2-408a-a77b-2ac28473154a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.006651] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fd9b2275-0d1a-4e49-8e70-93cbc1b3d645 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.017281] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance acc214aa-35e9-4302-89c7-9248bdda70f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.027245] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fc47b67f-b19a-4b9c-a6df-849c2d3c6797 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.037675] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 4e1969dc-292e-4322-be26-de7d11c405fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.048988] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.061711] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.072988] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance a0e94a85-8b9b-4394-bbaa-cc21786d3d01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 919.073276] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 919.073560] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '24', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'io_workload': '10', 'num_proj_5beaa7046a57489b8b0ca03a79344d08': '1', 'num_proj_779314e2630246b98c8b6a11c3f71890': '1', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1', 'num_task_spawning': '2', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '2', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1', 'num_proj_5922737c204d481fb40713877b5f46f5': '1', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 919.361738] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f21c2a6-cda0-4b16-a7c6-a311799cc468 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.370122] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962ec720-aa99-41c3-a081-0388d75d0283 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.402039] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1bb6e4-a16f-478b-8798-422264fe0e92 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.410215] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0736d59f-142d-49a2-a166-1a9a7c2514a8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.425076] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.434122] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.455220] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f 
None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 919.455439] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.624s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.455779] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.753449] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.753687] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.753206] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.748684] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.737294] env[61570]: WARNING oslo_vmware.rw_handles [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 943.737294] env[61570]: ERROR oslo_vmware.rw_handles [ 943.737963] 
env[61570]: DEBUG nova.virt.vmwareapi.images [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 943.740865] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 943.740865] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Copying Virtual Disk [datastore2] vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/14a7ab68-d3bb-4bff-8d92-4057c6f01fa4/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 943.740865] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b2a4710-1496-427e-922f-000385ae4c65 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.751743] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Waiting for the task: (returnval){ [ 943.751743] env[61570]: value = "task-4891330" [ 943.751743] env[61570]: _type = "Task" [ 943.751743] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.760788] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Task: {'id': task-4891330, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.262881] env[61570]: DEBUG oslo_vmware.exceptions [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 944.263231] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.263798] env[61570]: ERROR nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 944.263798] env[61570]: Faults: ['InvalidArgument'] [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Traceback (most recent call last): [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] yield resources [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self.driver.spawn(context, instance, image_meta, [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self._fetch_image_if_missing(context, vi) [ 944.263798] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] image_cache(vi, tmp_image_ds_loc) [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] vm_util.copy_virtual_disk( [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] session._wait_for_task(vmdk_copy_task) [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] return self.wait_for_task(task_ref) [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] return evt.wait() [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] result = hub.switch() [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 944.264324] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] return self.greenlet.switch() [ 944.264780] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 944.264780] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self.f(*self.args, **self.kw) [ 944.264780] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 944.264780] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] raise exceptions.translate_fault(task_info.error) [ 944.264780] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 944.264780] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Faults: ['InvalidArgument'] [ 944.264780] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] [ 944.264780] env[61570]: INFO nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Terminating instance [ 944.265760] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.265971] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.266221] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b70500d-6e2a-4c5c-b9a3-58ccd521bb43 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.268543] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 944.268723] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 944.269476] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc417ab-630f-4f4f-9640-b9557c8716ad {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.277069] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 944.277339] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e35254eb-cd7c-4efe-9b2c-446d0397d88b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.279877] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.280064] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 944.281097] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62244529-b378-4393-8223-df2569eb5b71 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.286907] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for the task: (returnval){ [ 944.286907] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]527d2156-9975-b0a8-9b0a-83f6e923b0f3" [ 944.286907] env[61570]: _type = "Task" [ 944.286907] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.296621] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]527d2156-9975-b0a8-9b0a-83f6e923b0f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.355841] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 944.356314] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 944.356670] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Deleting the datastore file [datastore2] bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.357110] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f91f1ad0-0d76-4b44-ae8f-cba3e582050e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.365786] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Waiting for the task: (returnval){ [ 944.365786] env[61570]: value = "task-4891332" [ 944.365786] env[61570]: _type = "Task" [ 944.365786] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.377993] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Task: {'id': task-4891332, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.800021] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 944.800021] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Creating directory with path [datastore2] vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.800021] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4114b1e6-b233-4284-acda-02d0ec59f8a3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.813405] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Created directory with path [datastore2] vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.813939] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Fetch image to [datastore2] vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 944.814366] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 944.815336] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb0df29-cdaf-46f1-ac31-6be8d5cb34bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.823846] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6ce45f-a527-4f9c-88e9-ca5e18d9af50 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.834811] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92906f51-3ae3-4071-b952-b1301fbb86b5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.870142] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d44e86e-0824-403b-81cd-c462fe0ea326 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.879726] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c358dcde-4ab6-4a20-97f0-52aefabea27a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.881722] env[61570]: DEBUG oslo_vmware.api [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Task: {'id': task-4891332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086522} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.882020] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.882222] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 944.882396] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 944.882569] env[61570]: INFO nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 944.885169] env[61570]: DEBUG nova.compute.claims [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 944.885379] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.885609] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.907036] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 944.968518] env[61570]: DEBUG oslo_vmware.rw_handles [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 945.031292] env[61570]: DEBUG oslo_vmware.rw_handles [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 945.031632] env[61570]: DEBUG oslo_vmware.rw_handles [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 945.341092] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc4bb76-41c6-4805-938f-cd0ee85daf51 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.349715] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fc0715-b664-426d-aa51-cb1ce65ab075 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.382318] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c91152-4955-42eb-aed3-ec738a976581 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.390537] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e614bd5-f9a4-4440-b961-4a6a0b01c4dc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.404960] env[61570]: DEBUG nova.compute.provider_tree [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.418011] env[61570]: DEBUG nova.scheduler.client.report [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.462040] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.576s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.462276] env[61570]: ERROR nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 945.462276] env[61570]: Faults: ['InvalidArgument'] [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Traceback (most recent call last): [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 945.462276] env[61570]: ERROR 
nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self.driver.spawn(context, instance, image_meta, [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self._fetch_image_if_missing(context, vi) [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] image_cache(vi, tmp_image_ds_loc) [ 945.462276] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] vm_util.copy_virtual_disk( [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] session._wait_for_task(vmdk_copy_task) [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] return self.wait_for_task(task_ref) [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] return evt.wait() [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] result = hub.switch() [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] return self.greenlet.switch() [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 945.462658] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] self.f(*self.args, **self.kw) [ 945.463182] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 945.463182] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] raise exceptions.translate_fault(task_info.error) [ 945.463182] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 945.463182] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Faults: ['InvalidArgument'] [ 945.463182] env[61570]: ERROR nova.compute.manager [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] [ 945.463182] env[61570]: DEBUG nova.compute.utils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 945.466029] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Build of instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 was re-scheduled: A specified parameter was not correct: fileType [ 945.466029] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 945.466029] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 945.466029] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 945.466029] env[61570]: DEBUG nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 945.466370] env[61570]: DEBUG nova.network.neutron [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 945.877619] env[61570]: DEBUG nova.network.neutron [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.901541] env[61570]: INFO nova.compute.manager [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Took 0.44 seconds to deallocate network for instance. [ 946.017478] env[61570]: INFO nova.scheduler.client.report [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Deleted allocations for instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 [ 946.041555] env[61570]: DEBUG oslo_concurrency.lockutils [None req-740d2859-b408-4e42-bf27-de8a3110cfa1 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 390.012s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.043099] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 191.216s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.043245] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Acquiring lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.044041] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.044041] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.046449] env[61570]: INFO nova.compute.manager [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Terminating instance [ 946.049227] env[61570]: DEBUG nova.compute.manager [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 946.049348] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 946.049614] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a6285cf-80b6-44c8-b8e0-8376ac7e3acc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.061086] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a945622-8b32-49db-99dc-7e3afdf952c7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.073514] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 946.100035] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf561c56-a65d-4bfb-80c0-a68b3bddfdb7 could not be found. 
[ 946.100035] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 946.100035] env[61570]: INFO nova.compute.manager [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 946.100308] env[61570]: DEBUG oslo.service.loopingcall [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.100488] env[61570]: DEBUG nova.compute.manager [-] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 946.100594] env[61570]: DEBUG nova.network.neutron [-] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 946.135315] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.135990] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.137316] env[61570]: INFO nova.compute.claims [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.140531] env[61570]: DEBUG nova.network.neutron [-] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.151056] env[61570]: INFO nova.compute.manager [-] [instance: bf561c56-a65d-4bfb-80c0-a68b3bddfdb7] Took 0.05 seconds to deallocate network for instance. 
[ 946.269910] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31901a43-c0ad-4a87-8f6b-0e7800e7df28 tempest-AttachInterfacesV270Test-214506591 tempest-AttachInterfacesV270Test-214506591-project-member] Lock "bf561c56-a65d-4bfb-80c0-a68b3bddfdb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.227s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.575245] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595df8cb-eac6-455d-aeeb-5c749e2c6c81 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.587241] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b303cbc-06b8-4d3c-8b7e-6fe6f18a681e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.642368] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c29244-2e75-485f-a2b7-e1b708505e9d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.655192] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000a9a7b-8ee3-485f-a3c1-f80978cfc7d0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.680381] env[61570]: DEBUG nova.compute.provider_tree [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.691739] env[61570]: DEBUG nova.scheduler.client.report [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.716316] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.580s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.717096] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 946.768800] env[61570]: DEBUG nova.compute.utils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 946.770112] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 946.770281] env[61570]: DEBUG nova.network.neutron [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 946.781827] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 946.850011] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 946.862735] env[61570]: DEBUG nova.policy [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa1b03bd9a8c4d68b21125c9ea3c871e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ad35672443f4c9d97f0240cadfb986d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 946.878782] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 946.879119] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 946.879186] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.879370] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 946.879513] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.879656] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 946.879860] 
env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 946.880039] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 946.880191] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 946.880355] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 946.880525] env[61570]: DEBUG nova.virt.hardware [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 946.881411] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9c721b-c4dc-42d5-8c03-1944a68fa057 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.890602] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1eaa412-6b55-47e3-8535-06f1e8f85fec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.328252] env[61570]: DEBUG nova.network.neutron [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Successfully created port: 0c3f0c5a-3ce2-4d80-a59c-681a0b65623c {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.261920] env[61570]: DEBUG nova.network.neutron [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Successfully updated port: 0c3f0c5a-3ce2-4d80-a59c-681a0b65623c {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.265051] env[61570]: DEBUG nova.compute.manager [req-3db41b55-ce95-4ffb-aaf6-645dbbd9eac8 req-9b632d81-6bef-4db2-96ab-63bc8d4127e1 service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Received event network-vif-plugged-0c3f0c5a-3ce2-4d80-a59c-681a0b65623c {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 948.265359] env[61570]: DEBUG oslo_concurrency.lockutils 
[req-3db41b55-ce95-4ffb-aaf6-645dbbd9eac8 req-9b632d81-6bef-4db2-96ab-63bc8d4127e1 service nova] Acquiring lock "0b77e196-4948-4a76-8e87-75e9b1e5df55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.265522] env[61570]: DEBUG oslo_concurrency.lockutils [req-3db41b55-ce95-4ffb-aaf6-645dbbd9eac8 req-9b632d81-6bef-4db2-96ab-63bc8d4127e1 service nova] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.265626] env[61570]: DEBUG oslo_concurrency.lockutils [req-3db41b55-ce95-4ffb-aaf6-645dbbd9eac8 req-9b632d81-6bef-4db2-96ab-63bc8d4127e1 service nova] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.265830] env[61570]: DEBUG nova.compute.manager [req-3db41b55-ce95-4ffb-aaf6-645dbbd9eac8 req-9b632d81-6bef-4db2-96ab-63bc8d4127e1 service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] No waiting events found dispatching network-vif-plugged-0c3f0c5a-3ce2-4d80-a59c-681a0b65623c {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 948.265958] env[61570]: WARNING nova.compute.manager [req-3db41b55-ce95-4ffb-aaf6-645dbbd9eac8 req-9b632d81-6bef-4db2-96ab-63bc8d4127e1 service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Received unexpected event network-vif-plugged-0c3f0c5a-3ce2-4d80-a59c-681a0b65623c for instance with vm_state building and task_state spawning. [ 948.280363] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "refresh_cache-0b77e196-4948-4a76-8e87-75e9b1e5df55" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.280363] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired lock "refresh_cache-0b77e196-4948-4a76-8e87-75e9b1e5df55" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.280363] env[61570]: DEBUG nova.network.neutron [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 948.347313] env[61570]: DEBUG nova.network.neutron [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 948.619590] env[61570]: DEBUG nova.network.neutron [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Updating instance_info_cache with network_info: [{"id": "0c3f0c5a-3ce2-4d80-a59c-681a0b65623c", "address": "fa:16:3e:6d:f4:c3", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c3f0c5a-3c", "ovs_interfaceid": "0c3f0c5a-3ce2-4d80-a59c-681a0b65623c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.636073] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Releasing lock "refresh_cache-0b77e196-4948-4a76-8e87-75e9b1e5df55" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.636489] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Instance network_info: |[{"id": "0c3f0c5a-3ce2-4d80-a59c-681a0b65623c", "address": "fa:16:3e:6d:f4:c3", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c3f0c5a-3c", "ovs_interfaceid": "0c3f0c5a-3ce2-4d80-a59c-681a0b65623c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 948.637265] env[61570]: DEBUG 
nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:f4:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c3f0c5a-3ce2-4d80-a59c-681a0b65623c', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 948.644816] env[61570]: DEBUG oslo.service.loopingcall [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 948.646023] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 948.646299] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f1b622f-ac9a-46bb-8474-5ff6764cda3b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.667331] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 948.667331] env[61570]: value = "task-4891333" [ 948.667331] env[61570]: _type = "Task" [ 948.667331] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.676409] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891333, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.181169] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891333, 'name': CreateVM_Task, 'duration_secs': 0.332688} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.181369] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 949.182040] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.182722] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.183221] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 949.183543] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb6fc686-029d-4375-8df9-387d385b8afe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.188474] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 949.188474] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f22d04-924d-424b-08f6-1541e15c58c8" [ 949.188474] env[61570]: _type = "Task" [ 949.188474] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.198880] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f22d04-924d-424b-08f6-1541e15c58c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.699124] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.699455] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.699695] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.384749] env[61570]: DEBUG nova.compute.manager [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Received event network-changed-0c3f0c5a-3ce2-4d80-a59c-681a0b65623c {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 950.384966] env[61570]: DEBUG nova.compute.manager [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Refreshing instance network info cache due to event network-changed-0c3f0c5a-3ce2-4d80-a59c-681a0b65623c. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 950.385252] env[61570]: DEBUG oslo_concurrency.lockutils [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] Acquiring lock "refresh_cache-0b77e196-4948-4a76-8e87-75e9b1e5df55" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.385457] env[61570]: DEBUG oslo_concurrency.lockutils [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] Acquired lock "refresh_cache-0b77e196-4948-4a76-8e87-75e9b1e5df55" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.385661] env[61570]: DEBUG nova.network.neutron [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Refreshing network info cache for port 0c3f0c5a-3ce2-4d80-a59c-681a0b65623c {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 951.100045] env[61570]: DEBUG nova.network.neutron [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Updated VIF entry in instance network info cache for port 0c3f0c5a-3ce2-4d80-a59c-681a0b65623c. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 951.100362] env[61570]: DEBUG nova.network.neutron [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Updating instance_info_cache with network_info: [{"id": "0c3f0c5a-3ce2-4d80-a59c-681a0b65623c", "address": "fa:16:3e:6d:f4:c3", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c3f0c5a-3c", "ovs_interfaceid": "0c3f0c5a-3ce2-4d80-a59c-681a0b65623c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.116574] env[61570]: DEBUG oslo_concurrency.lockutils [req-a1549742-c325-4006-a8d3-0c098d0637cc req-257d4357-a9fd-4619-9b9f-fdabf3677bab service nova] Releasing lock "refresh_cache-0b77e196-4948-4a76-8e87-75e9b1e5df55" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.722097] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.047948] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.048260] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.233680] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6f26fceb-8d46-4d6f-9579-56bea14629a0 tempest-ServersTestManualDisk-769530409 tempest-ServersTestManualDisk-769530409-project-member] Acquiring lock "2ea0e9df-20df-4d6b-9214-c94e0b8f8468" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.234082] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6f26fceb-8d46-4d6f-9579-56bea14629a0 tempest-ServersTestManualDisk-769530409 tempest-ServersTestManualDisk-769530409-project-member] Lock "2ea0e9df-20df-4d6b-9214-c94e0b8f8468" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.406479] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d05e5743-9351-419d-9efd-a451321c2296 tempest-InstanceActionsV221TestJSON-1159670729 tempest-InstanceActionsV221TestJSON-1159670729-project-member] Acquiring lock "66db9bd5-4f21-475a-be59-c38a4b45e43e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.407088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d05e5743-9351-419d-9efd-a451321c2296 tempest-InstanceActionsV221TestJSON-1159670729 tempest-InstanceActionsV221TestJSON-1159670729-project-member] Lock "66db9bd5-4f21-475a-be59-c38a4b45e43e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.542524] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.752913] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 978.753209] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 978.753312] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 978.778311] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.778547] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.778733] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.778911] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.779168] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.779284] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.779392] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.779576] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.779662] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.779784] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 978.779907] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 978.780488] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 978.793153] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.793422] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.793597] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.793755] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 978.794933] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7c58c3-adb2-43d9-8ea0-3b3bf5df16e5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.803913] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5262328-9a44-4d83-93db-6037eee008a0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.818424] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1e62a1-2e50-4fc7-a0b4-47e354af66f9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.825963] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6806de3-f1fe-436b-82c8-4783937e52f9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.857495] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180595MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 978.857716] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.857900] 
env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.939860] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ded35886-716c-4725-8fc9-cd6dfc04281a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.940787] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.940787] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.940787] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.940787] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.941013] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.941013] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.941013] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.941013] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.941129] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 978.954971] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 43599711-7de0-465c-a8ab-fc24d90ed9c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 978.966933] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3914246-a7b8-46d3-b8c1-3c7254a30693 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 978.978676] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 94e9909d-31a5-4d2c-a12b-aaebb32d4445 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 978.996119] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.015131] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.028066] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0dc0f5b5-03fc-4b7c-9715-52c6746c86f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.038871] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e3183113-8fc2-408a-a77b-2ac28473154a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.052015] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fd9b2275-0d1a-4e49-8e70-93cbc1b3d645 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.065538] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance acc214aa-35e9-4302-89c7-9248bdda70f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.077552] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fc47b67f-b19a-4b9c-a6df-849c2d3c6797 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.091174] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 4e1969dc-292e-4322-be26-de7d11c405fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.102312] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.112678] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.122035] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance a0e94a85-8b9b-4394-bbaa-cc21786d3d01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.132862] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.144354] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2ea0e9df-20df-4d6b-9214-c94e0b8f8468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.156168] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 66db9bd5-4f21-475a-be59-c38a4b45e43e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 979.156429] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 979.156592] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '25', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'io_workload': '10', 'num_proj_779314e2630246b98c8b6a11c3f71890': '1', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '2', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1', 'num_proj_5922737c204d481fb40713877b5f46f5': '1', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1', 'num_task_spawning': '1', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '2'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 979.523721] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2015d150-f02d-4eef-81ad-bf0806134602 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.530965] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b469449-ecfa-4383-abed-106b496ae8b8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.561459] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6059429c-f36a-41bb-b1e4-52bab947409b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.569777] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfc19c0-7178-4d49-9bcc-0083a3185d43 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.584798] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.596498] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.611729] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 979.611729] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.754s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.585017] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.585255] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.585421] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.585565] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 981.752945] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.753192] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.753827] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.747845] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.750082] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.856360] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7ab0422e-060d-4ab8-bb83-6b1828733582 tempest-ServerActionsTestOtherB-681975538 tempest-ServerActionsTestOtherB-681975538-project-member] Acquiring lock "b2749b84-ad2b-4e19-ab8f-4d4b3e157260" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.856360] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7ab0422e-060d-4ab8-bb83-6b1828733582 tempest-ServerActionsTestOtherB-681975538 tempest-ServerActionsTestOtherB-681975538-project-member] Lock "b2749b84-ad2b-4e19-ab8f-4d4b3e157260" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.113123] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Acquiring lock "b54ba09c-1148-490c-89c4-9dd210249220" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.113386] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Lock "b54ba09c-1148-490c-89c4-9dd210249220" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.150680] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Acquiring lock "68805c65-f211-4018-a3a7-ea458ef817e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.150905] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Lock "68805c65-f211-4018-a3a7-ea458ef817e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.193851] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Acquiring lock "96532039-0ec3-4852-87f7-6bdaa209f5c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.194361] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Lock "96532039-0ec3-4852-87f7-6bdaa209f5c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.463911] env[61570]: WARNING oslo_vmware.rw_handles [None 
req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 990.463911] env[61570]: ERROR oslo_vmware.rw_handles [ 990.463911] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 990.464812] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 990.464812] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Copying Virtual Disk [datastore2] vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/35458d26-b869-4a6c-94f1-9e942a0e18d5/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 990.464812] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-270cb3e0-d532-4653-a689-a00021ba423e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.477916] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for the task: (returnval){ [ 990.477916] env[61570]: value = "task-4891334" [ 990.477916] env[61570]: _type = "Task" [ 990.477916] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.486652] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': task-4891334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.993287] env[61570]: DEBUG oslo_vmware.exceptions [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 990.993710] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.994289] env[61570]: ERROR nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 990.994289] env[61570]: Faults: ['InvalidArgument'] [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Traceback (most recent call last): [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] yield resources [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] self.driver.spawn(context, instance, image_meta, [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] self._fetch_image_if_missing(context, vi) [ 990.994289] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] image_cache(vi, tmp_image_ds_loc) [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] vm_util.copy_virtual_disk( [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] session._wait_for_task(vmdk_copy_task) [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] return self.wait_for_task(task_ref) [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] return evt.wait() [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] result = hub.switch() [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 990.994657] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] return self.greenlet.switch() [ 990.994975] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 990.994975] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] self.f(*self.args, **self.kw) [ 990.994975] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 990.994975] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] raise exceptions.translate_fault(task_info.error) [ 990.994975] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 990.994975] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Faults: ['InvalidArgument'] [ 990.994975] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] [ 990.994975] env[61570]: INFO nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Terminating instance [ 991.001548] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 991.003415] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.003781] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.004277] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.004857] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1899704e-fd27-4722-b653-77833e610c16 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.012060] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cda6126d-22b0-4cc0-bf63-023d97a36caf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.020649] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 991.022739] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cefc6f66-ace1-401a-bf02-8e9cbfea17b4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.025394] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.025394] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 991.025394] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf61db04-6c05-496e-be47-1d51570243e3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.032344] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Waiting for the task: (returnval){ [ 991.032344] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52e78efd-5ebf-39da-6b68-01083a07dd26" [ 991.032344] env[61570]: _type = "Task" [ 991.032344] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.041137] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52e78efd-5ebf-39da-6b68-01083a07dd26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.132779] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 991.133135] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 991.133216] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Deleting the datastore file [datastore2] ded35886-716c-4725-8fc9-cd6dfc04281a {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.133535] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3bc9881-a006-4af7-b499-3e39c75bcddc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.140209] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for the task: (returnval){ [ 991.140209] env[61570]: value = "task-4891336" [ 991.140209] env[61570]: _type = "Task" [ 991.140209] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.149549] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': task-4891336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.544967] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 991.545302] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Creating directory with path [datastore2] vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.545548] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8d5bf6e-a744-4849-a6a7-0820ea6ea420 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.558753] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Created directory with path [datastore2] vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.559046] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Fetch image to [datastore2] vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 991.559244] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 991.560184] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae2651f-699b-4b56-a5dc-8a75f248a863 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.568053] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2808d6fd-0f58-467f-8686-96a1e726dc1a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.578049] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750a1869-7246-4cc0-bba5-677d744014c8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.613763] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7f9de87a-a56d-4396-aa77-ce6c05c31a07 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.621792] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7dc68346-9165-49ff-a752-fc371042f7cd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.647661] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 991.655409] env[61570]: DEBUG oslo_vmware.api [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': task-4891336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079477} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.655522] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.655669] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 991.655843] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 991.656032] env[61570]: INFO nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Took 0.65 seconds to destroy the instance on the hypervisor. 
[ 991.659587] env[61570]: DEBUG nova.compute.claims [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 991.659705] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.660020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.726473] env[61570]: DEBUG oslo_vmware.rw_handles [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 991.800769] env[61570]: DEBUG oslo_vmware.rw_handles [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 991.800851] env[61570]: DEBUG oslo_vmware.rw_handles [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 992.198076] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38212f33-ea8c-49d1-90e9-5545e13f5824 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.206108] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933083c7-8500-4054-9cfb-c76d2951b32c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.238528] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6e841d-01ef-41d2-ad8d-7a9a2d9cf410 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.246412] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23aca61a-b70d-4b37-b200-4a0c7209db48 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.261081] env[61570]: DEBUG nova.compute.provider_tree [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.269986] env[61570]: DEBUG nova.scheduler.client.report [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 992.287339] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.627s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.287680] env[61570]: ERROR nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 992.287680] env[61570]: Faults: ['InvalidArgument'] [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Traceback (most recent call last): [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: 
ded35886-716c-4725-8fc9-cd6dfc04281a] self.driver.spawn(context, instance, image_meta, [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] self._fetch_image_if_missing(context, vi) [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] image_cache(vi, tmp_image_ds_loc) [ 992.287680] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] vm_util.copy_virtual_disk( [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] session._wait_for_task(vmdk_copy_task) [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] return self.wait_for_task(task_ref) [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] return evt.wait() [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] result = hub.switch() [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] return self.greenlet.switch() [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 992.288066] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] self.f(*self.args, **self.kw) [ 992.288411] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 992.288411] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] raise exceptions.translate_fault(task_info.error) [ 992.288411] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 992.288411] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Faults: ['InvalidArgument'] [ 992.288411] env[61570]: ERROR nova.compute.manager [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] [ 992.288411] env[61570]: DEBUG nova.compute.utils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 992.290188] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Build of instance ded35886-716c-4725-8fc9-cd6dfc04281a was re-scheduled: A specified parameter was not correct: fileType [ 992.290188] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 992.290580] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 992.290808] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 992.290975] env[61570]: DEBUG nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 992.291154] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 992.845135] env[61570]: DEBUG nova.network.neutron [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.858376] env[61570]: INFO nova.compute.manager [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Took 0.57 seconds to deallocate network for instance. [ 992.990450] env[61570]: INFO nova.scheduler.client.report [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Deleted allocations for instance ded35886-716c-4725-8fc9-cd6dfc04281a [ 993.019246] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bd0fcf7d-bb74-42dd-b332-899f85c27d15 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 437.987s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.020642] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 237.430s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.020922] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "ded35886-716c-4725-8fc9-cd6dfc04281a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.021224] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.021632] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.024294] env[61570]: INFO nova.compute.manager [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Terminating instance [ 993.026709] env[61570]: DEBUG nova.compute.manager [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 993.028352] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 993.028352] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c91594b2-5f6a-49c9-9ff2-e5e801777777 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.044985] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a11a506-cb32-4d55-a05d-393e8bef5558 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.062410] env[61570]: DEBUG nova.compute.manager [None req-36179c56-713d-4ff7-987d-775f4d0a69f6 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] [instance: 43599711-7de0-465c-a8ab-fc24d90ed9c2] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 993.082587] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ded35886-716c-4725-8fc9-cd6dfc04281a could not be found. [ 993.082773] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 993.082975] env[61570]: INFO nova.compute.manager [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 993.083245] env[61570]: DEBUG oslo.service.loopingcall [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.083508] env[61570]: DEBUG nova.compute.manager [-] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 993.083605] env[61570]: DEBUG nova.network.neutron [-] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 993.092413] env[61570]: DEBUG nova.compute.manager [None req-36179c56-713d-4ff7-987d-775f4d0a69f6 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] [instance: 43599711-7de0-465c-a8ab-fc24d90ed9c2] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 993.122799] env[61570]: DEBUG oslo_concurrency.lockutils [None req-36179c56-713d-4ff7-987d-775f4d0a69f6 tempest-SecurityGroupsTestJSON-710483461 tempest-SecurityGroupsTestJSON-710483461-project-member] Lock "43599711-7de0-465c-a8ab-fc24d90ed9c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.532s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.124849] env[61570]: DEBUG nova.network.neutron [-] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.134229] env[61570]: INFO nova.compute.manager [-] [instance: ded35886-716c-4725-8fc9-cd6dfc04281a] Took 0.05 seconds to deallocate network for instance. [ 993.136704] env[61570]: DEBUG nova.compute.manager [None req-d0dbfaea-127d-42c5-a05d-a12943d26870 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] [instance: f3914246-a7b8-46d3-b8c1-3c7254a30693] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 993.160444] env[61570]: DEBUG nova.compute.manager [None req-d0dbfaea-127d-42c5-a05d-a12943d26870 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] [instance: f3914246-a7b8-46d3-b8c1-3c7254a30693] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 993.182476] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d0dbfaea-127d-42c5-a05d-a12943d26870 tempest-ServersTestMultiNic-71975332 tempest-ServersTestMultiNic-71975332-project-member] Lock "f3914246-a7b8-46d3-b8c1-3c7254a30693" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.284s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.194835] env[61570]: DEBUG nova.compute.manager [None req-3109278e-54da-42f5-af5c-3800572a6f1a tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 1c76ec6f-08e0-4786-bcac-70fbc87fc789] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 993.221203] env[61570]: DEBUG nova.compute.manager [None req-3109278e-54da-42f5-af5c-3800572a6f1a tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 1c76ec6f-08e0-4786-bcac-70fbc87fc789] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 993.240087] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d647bda1-d988-4451-931a-a23e40d662df tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "ded35886-716c-4725-8fc9-cd6dfc04281a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.219s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.249207] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3109278e-54da-42f5-af5c-3800572a6f1a tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "1c76ec6f-08e0-4786-bcac-70fbc87fc789" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.449s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.260295] env[61570]: DEBUG nova.compute.manager [None req-35c5833e-5987-47b2-9d2e-3e5db7f4653b tempest-ServerRescueTestJSONUnderV235-1606670301 tempest-ServerRescueTestJSONUnderV235-1606670301-project-member] [instance: 94e9909d-31a5-4d2c-a12b-aaebb32d4445] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 993.287351] env[61570]: DEBUG nova.compute.manager [None req-35c5833e-5987-47b2-9d2e-3e5db7f4653b tempest-ServerRescueTestJSONUnderV235-1606670301 tempest-ServerRescueTestJSONUnderV235-1606670301-project-member] [instance: 94e9909d-31a5-4d2c-a12b-aaebb32d4445] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 993.314214] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35c5833e-5987-47b2-9d2e-3e5db7f4653b tempest-ServerRescueTestJSONUnderV235-1606670301 tempest-ServerRescueTestJSONUnderV235-1606670301-project-member] Lock "94e9909d-31a5-4d2c-a12b-aaebb32d4445" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.239s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.330921] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 993.405394] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.405766] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.407609] env[61570]: INFO nova.compute.claims [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.921121] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d0d8cc-546b-414a-802b-a501779c84ca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.930173] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6dbed3-eb5f-41b1-941e-a22252cb2622 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.970324] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e609a38-642f-4481-b97e-269e4cab879c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.980265] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b650e7d-c958-4340-b6bc-132498d1c60f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.000024] env[61570]: DEBUG nova.compute.provider_tree [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.010137] env[61570]: DEBUG nova.scheduler.client.report 
[None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 994.037903] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.632s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.038804] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 994.084287] env[61570]: DEBUG nova.compute.utils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 994.086653] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 994.086822] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 994.097251] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Start building block device mappings for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 994.172479] env[61570]: DEBUG nova.policy [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a7a7f44b98e4eacb56d11d43dc3cad4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '743eeefd02e04e63850742fc5590125f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 994.176776] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 994.223011] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.223411] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.223583] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.223873] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.224069] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.224232] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 
tempest-ServersTestJSON-516366677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 994.224489] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.224667] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.224837] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.225078] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.225215] env[61570]: DEBUG nova.virt.hardware [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.226117] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0d3e11-306e-4977-b462-9c4cc807d759 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.234913] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fe39d8-9ef5-480b-940a-1cedf718c00c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.140451] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Successfully created port: 338b9f95-ddd4-4db1-a8b2-6183c00a9212 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.817515] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "8ef1d751-e809-46e0-b98f-ac90ab076889" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.859359] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Successfully updated port: 
338b9f95-ddd4-4db1-a8b2-6183c00a9212 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.875762] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.876279] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.876279] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 996.927023] env[61570]: DEBUG nova.compute.manager [req-b1f49aec-c929-496a-9250-1336d75ff6eb req-50d7495b-9da4-46ce-ab32-eeb4ab30ddf7 service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Received event network-vif-plugged-338b9f95-ddd4-4db1-a8b2-6183c00a9212 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 996.927718] env[61570]: DEBUG oslo_concurrency.lockutils [req-b1f49aec-c929-496a-9250-1336d75ff6eb req-50d7495b-9da4-46ce-ab32-eeb4ab30ddf7 service nova] Acquiring lock "8ef1d751-e809-46e0-b98f-ac90ab076889-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.929581] env[61570]: DEBUG oslo_concurrency.lockutils [req-b1f49aec-c929-496a-9250-1336d75ff6eb req-50d7495b-9da4-46ce-ab32-eeb4ab30ddf7 service nova] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.929581] env[61570]: DEBUG oslo_concurrency.lockutils [req-b1f49aec-c929-496a-9250-1336d75ff6eb req-50d7495b-9da4-46ce-ab32-eeb4ab30ddf7 service nova] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.929581] env[61570]: DEBUG nova.compute.manager [req-b1f49aec-c929-496a-9250-1336d75ff6eb req-50d7495b-9da4-46ce-ab32-eeb4ab30ddf7 service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] No waiting events found dispatching network-vif-plugged-338b9f95-ddd4-4db1-a8b2-6183c00a9212 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 996.929581] env[61570]: WARNING nova.compute.manager [req-b1f49aec-c929-496a-9250-1336d75ff6eb req-50d7495b-9da4-46ce-ab32-eeb4ab30ddf7 service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Received unexpected event network-vif-plugged-338b9f95-ddd4-4db1-a8b2-6183c00a9212 for instance with vm_state building and task_state deleting. 
[ 996.950122] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.381452] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Updating instance_info_cache with network_info: [{"id": "338b9f95-ddd4-4db1-a8b2-6183c00a9212", "address": "fa:16:3e:b1:d6:fd", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap338b9f95-dd", "ovs_interfaceid": "338b9f95-ddd4-4db1-a8b2-6183c00a9212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.407804] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.407804] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance network_info: |[{"id": "338b9f95-ddd4-4db1-a8b2-6183c00a9212", "address": "fa:16:3e:b1:d6:fd", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap338b9f95-dd", "ovs_interfaceid": "338b9f95-ddd4-4db1-a8b2-6183c00a9212", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 997.408064] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:d6:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '338b9f95-ddd4-4db1-a8b2-6183c00a9212', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.415661] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating folder: Project (743eeefd02e04e63850742fc5590125f). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 997.415889] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e6bdd5d-530b-4018-8cb6-67042ee7b50b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.427456] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Created folder: Project (743eeefd02e04e63850742fc5590125f) in parent group-v953072. [ 997.427456] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating folder: Instances. Parent ref: group-v953131. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 997.427626] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee6e18e6-c974-436d-ad09-ee0f28c96f3e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.441059] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Created folder: Instances in parent group-v953131. [ 997.441059] env[61570]: DEBUG oslo.service.loopingcall [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.441059] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 997.441059] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aca97a88-36ce-44c8-a860-eb7ae17554e3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.461045] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.461045] env[61570]: value = "task-4891339" [ 997.461045] env[61570]: _type = "Task" [ 997.461045] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.469118] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891339, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.972583] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891339, 'name': CreateVM_Task, 'duration_secs': 0.326307} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.972870] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 997.973547] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.973729] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.974065] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 997.974376] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd40a706-9c8d-4f28-bc6b-b751552ae21b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.979408] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 997.979408] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]520d37d3-1654-6e37-b397-edb601a7c9f9" [ 997.979408] env[61570]: _type = "Task" [ 997.979408] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.988348] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]520d37d3-1654-6e37-b397-edb601a7c9f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.490610] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.490861] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.491046] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.021558] env[61570]: DEBUG nova.compute.manager [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Received event network-changed-338b9f95-ddd4-4db1-a8b2-6183c00a9212 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 999.021819] env[61570]: DEBUG nova.compute.manager [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Refreshing instance network info cache due to event network-changed-338b9f95-ddd4-4db1-a8b2-6183c00a9212. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 999.022608] env[61570]: DEBUG oslo_concurrency.lockutils [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] Acquiring lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.022608] env[61570]: DEBUG oslo_concurrency.lockutils [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] Acquired lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.022608] env[61570]: DEBUG nova.network.neutron [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Refreshing network info cache for port 338b9f95-ddd4-4db1-a8b2-6183c00a9212 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 999.486263] env[61570]: DEBUG nova.network.neutron [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Updated VIF entry in instance network info cache for port 338b9f95-ddd4-4db1-a8b2-6183c00a9212. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 999.486263] env[61570]: DEBUG nova.network.neutron [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Updating instance_info_cache with network_info: [{"id": "338b9f95-ddd4-4db1-a8b2-6183c00a9212", "address": "fa:16:3e:b1:d6:fd", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap338b9f95-dd", "ovs_interfaceid": "338b9f95-ddd4-4db1-a8b2-6183c00a9212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.501547] env[61570]: DEBUG oslo_concurrency.lockutils [req-8aca2a1b-4e8f-4aec-a114-c79cb8d7c420 req-0e2eac05-bf15-4f38-aa4d-c763d97eb3bd service nova] Releasing lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.719182] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 
tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "f9d0b44c-a338-495e-8ed2-9c79813671fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.719609] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.173613] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0dec7974-69f0-4038-8170-9194c764ba51 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "fa47c527-5dc6-4162-b4d8-d8bab3f2b13c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.173903] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0dec7974-69f0-4038-8170-9194c764ba51 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "fa47c527-5dc6-4162-b4d8-d8bab3f2b13c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.860785] env[61570]: DEBUG oslo_concurrency.lockutils [None req-84723f06-ca83-46ac-b41f-2fcc576388d1 tempest-ServersTestFqdnHostnames-114570745 tempest-ServersTestFqdnHostnames-114570745-project-member] Acquiring lock "1f6709ec-fb9d-490d-beb8-53883fb533e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.860785] env[61570]: DEBUG oslo_concurrency.lockutils [None req-84723f06-ca83-46ac-b41f-2fcc576388d1 tempest-ServersTestFqdnHostnames-114570745 tempest-ServersTestFqdnHostnames-114570745-project-member] Lock "1f6709ec-fb9d-490d-beb8-53883fb533e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.996070] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b16b6e0b-1c3f-4c56-b272-460bf5836f44 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "f6b0686c-b81d-4b18-bacf-be573a28a277" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.996070] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b16b6e0b-1c3f-4c56-b272-460bf5836f44 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "f6b0686c-b81d-4b18-bacf-be573a28a277" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.523088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5cb87ca1-3ba6-418e-a820-ede9f99668fb tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "ef7c996b-f62f-4146-b48b-c865c362c12c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.523419] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5cb87ca1-3ba6-418e-a820-ede9f99668fb tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "ef7c996b-f62f-4146-b48b-c865c362c12c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.035235] env[61570]: DEBUG oslo_concurrency.lockutils [None req-44825749-21a6-40ee-873c-2b2a48aec073 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Acquiring lock "e9385963-cda4-4778-92d6-4a20722b34bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.035629] env[61570]: DEBUG oslo_concurrency.lockutils [None req-44825749-21a6-40ee-873c-2b2a48aec073 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Lock "e9385963-cda4-4778-92d6-4a20722b34bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.841975] env[61570]: DEBUG oslo_concurrency.lockutils [None req-117e8b18-4008-44a6-b16f-2ddab1324259 tempest-ServerMetadataNegativeTestJSON-777350147 tempest-ServerMetadataNegativeTestJSON-777350147-project-member] Acquiring lock "b119eaef-9f58-46e6-9bc4-f47fa88d53c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.842269] env[61570]: DEBUG oslo_concurrency.lockutils [None req-117e8b18-4008-44a6-b16f-2ddab1324259 tempest-ServerMetadataNegativeTestJSON-777350147 tempest-ServerMetadataNegativeTestJSON-777350147-project-member] Lock "b119eaef-9f58-46e6-9bc4-f47fa88d53c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.083131] env[61570]: WARNING oslo_vmware.rw_handles [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in 
close [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1039.083131] env[61570]: ERROR oslo_vmware.rw_handles [ 1039.083754] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1039.085688] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1039.085913] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Copying Virtual Disk [datastore2] vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/22f46b0a-7b39-4f1b-8e4e-6c74538aca60/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1039.086223] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a051c44-99e6-482f-9c85-a98c62a9f8f7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.094952] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Waiting for the task: (returnval){ [ 1039.094952] env[61570]: value = "task-4891340" [ 1039.094952] env[61570]: _type = "Task" [ 1039.094952] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.104538] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Task: {'id': task-4891340, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.605599] env[61570]: DEBUG oslo_vmware.exceptions [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1039.605893] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.606557] env[61570]: ERROR nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.606557] env[61570]: Faults: ['InvalidArgument'] [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Traceback (most recent call last): [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] yield resources [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self.driver.spawn(context, instance, image_meta, [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self._fetch_image_if_missing(context, vi) [ 1039.606557] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] image_cache(vi, tmp_image_ds_loc) [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] vm_util.copy_virtual_disk( [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] session._wait_for_task(vmdk_copy_task) [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] return self.wait_for_task(task_ref) [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] return evt.wait() [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] result = hub.switch() [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1039.606932] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] return self.greenlet.switch() [ 1039.607287] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1039.607287] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self.f(*self.args, **self.kw) [ 1039.607287] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1039.607287] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] raise exceptions.translate_fault(task_info.error) [ 1039.607287] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.607287] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Faults: ['InvalidArgument'] [ 1039.607287] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] [ 1039.607287] env[61570]: INFO nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Terminating instance [ 1039.608664] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.608877] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 
tempest-ServerTagsTestJSON-245361709-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.609540] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1039.609743] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1039.609972] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ca5aed7-4fb7-4f89-963c-39b350dc19e7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.612276] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6463f7c-2965-463d-a564-e8c2b95e4ebd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.619442] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1039.619674] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23a62b51-b314-4e76-9330-51cd899e27a2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.621906] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.622092] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1039.623050] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5263a32c-1a74-4c15-9e37-4cdc72514213 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.628076] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Waiting for the task: (returnval){ [ 1039.628076] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52a4d5a7-ad38-e2a3-afdc-ec2652286795" [ 1039.628076] env[61570]: _type = "Task" [ 1039.628076] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.635836] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52a4d5a7-ad38-e2a3-afdc-ec2652286795, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.692528] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1039.692777] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1039.692960] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Deleting the datastore file [datastore2] 12435283-c350-4d85-be82-1c85e1ea17be {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1039.693247] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0d600f0-24e5-4efa-83d8-1f12d68d2f7f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.700098] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Waiting for the task: (returnval){ [ 1039.700098] env[61570]: value = "task-4891342" [ 1039.700098] env[61570]: _type = "Task" [ 1039.700098] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.709721] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Task: {'id': task-4891342, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.138782] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1040.139020] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Creating directory with path [datastore2] vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.139162] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3a76780-acf7-4d0e-bae9-9058e0773631 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.150952] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Created directory with path [datastore2] vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.151212] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Fetch image to [datastore2] vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1040.151379] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1040.152146] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9516653-8a1c-44a8-9dbc-06b3b3ed56f0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.159619] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86e4601-9dd3-4868-ab37-c90bf9a67960 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.169259] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d80da4-d132-4c85-8d56-4127c046f960 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.201296] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6abc1a1-c040-424a-8d08-234be653f7a3 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.214094] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d328aefe-ce24-4acb-8599-067a076c1110 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.215138] env[61570]: DEBUG oslo_vmware.api [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Task: {'id': task-4891342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074338} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.215366] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.215546] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1040.215714] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1040.215886] env[61570]: INFO nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Took 0.61 seconds to destroy the instance on the hypervisor. 
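
The CopyVirtualDisk_Task failure and the cleanup that follows all go through the same vCenter task pattern visible in these entries: the driver invokes a *_Task operation, then waits while the task is polled ("progress is 0%.") until it either completes successfully (as DeleteDatastoreFile_Task does, reporting a duration_secs) or ends in a fault, which is raised as a VimFaultException ("A specified parameter was not correct: fileType" / InvalidArgument above). The following is a minimal sketch of that polling loop under stated assumptions; poll_task_info is a hypothetical callable standing in for the real oslo.vmware session/property-collector calls, and FakeTaskError stands in for VimFaultException.

```python
# Illustrative sketch only -- not the oslo.vmware implementation.
# It mirrors the wait_for_task pattern seen in the log: a vCenter task
# is polled until it finishes, progress is reported along the way, and
# an error state is turned into an exception (like the InvalidArgument
# fileType fault during CopyVirtualDisk_Task).
import time


class FakeTaskError(Exception):
    """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""


def wait_for_task(poll_task_info, task_id, interval=0.5):
    """Poll task_id until it reports success or error.

    poll_task_info(task_id) returns a dict such as
    {'state': 'running', 'progress': 40}; in the real library this
    information comes from the vSphere TaskInfo object instead.
    """
    start = time.monotonic()
    while True:
        info = poll_task_info(task_id)
        if info['state'] == 'success':
            info['duration_secs'] = time.monotonic() - start
            return info
        if info['state'] == 'error':
            # The real code translates the VIM fault class; here we
            # simply raise the stand-in exception.
            raise FakeTaskError(info.get('fault', 'unknown fault'))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)


# Example: a task that fails the way CopyVirtualDisk_Task does above.
states = iter([{'state': 'running', 'progress': 0},
               {'state': 'error', 'fault': 'InvalidArgument: fileType'}])
try:
    wait_for_task(lambda _tid: next(states), "task-4891340", interval=0)
except FakeTaskError as exc:
    print("Task failed:", exc)
```

When the fault propagates out of the poll, the compute manager aborts the resource claim and re-schedules the build, which is exactly the sequence the remaining entries in this section record.
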
[ 1040.218838] env[61570]: DEBUG nova.compute.claims [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1040.219020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.219235] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.239052] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1040.302494] env[61570]: DEBUG oslo_vmware.rw_handles [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1040.366377] env[61570]: DEBUG oslo_vmware.rw_handles [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1040.366584] env[61570]: DEBUG oslo_vmware.rw_handles [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1040.651324] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ccc1bb-8f80-40e5-aa0c-15ec035506ec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.660236] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931e8f8f-bb64-42db-8d5d-ff0a6a45bc37 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.689504] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bf3fc1-9805-4784-9cfa-febccc6b81c9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.696616] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f223ad60-830a-417a-90db-be1a1de18f15 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.709509] env[61570]: DEBUG nova.compute.provider_tree [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.719754] env[61570]: DEBUG nova.scheduler.client.report [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1040.733623] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.514s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.734202] env[61570]: ERROR nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1040.734202] env[61570]: Faults: ['InvalidArgument'] [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Traceback (most recent call last): [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self.driver.spawn(context, instance, image_meta, [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self._fetch_image_if_missing(context, vi) [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] image_cache(vi, tmp_image_ds_loc) [ 1040.734202] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] vm_util.copy_virtual_disk( [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] session._wait_for_task(vmdk_copy_task) [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] return self.wait_for_task(task_ref) [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] return evt.wait() [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] result = hub.switch() [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] return self.greenlet.switch() [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1040.734598] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] self.f(*self.args, **self.kw) [ 1040.735096] env[61570]: ERROR nova.compute.manager [instance: 
12435283-c350-4d85-be82-1c85e1ea17be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1040.735096] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] raise exceptions.translate_fault(task_info.error) [ 1040.735096] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1040.735096] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Faults: ['InvalidArgument'] [ 1040.735096] env[61570]: ERROR nova.compute.manager [instance: 12435283-c350-4d85-be82-1c85e1ea17be] [ 1040.735096] env[61570]: DEBUG nova.compute.utils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1040.736476] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Build of instance 12435283-c350-4d85-be82-1c85e1ea17be was re-scheduled: A specified parameter was not correct: fileType [ 1040.736476] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1040.736838] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1040.737017] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1040.737199] env[61570]: DEBUG nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1040.737364] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1040.752268] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.752421] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1040.752619] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1040.775022] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.775190] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.775280] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.775405] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.775538] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.775657] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.775775] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.775892] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.776016] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1040.776163] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1040.776659] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.776866] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.776975] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1040.777165] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.787452] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.787670] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.787832] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.787983] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1040.789099] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39abb90-069c-406b-97a5-ff0199a1d5aa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.799402] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0639ada-c1cb-4dd9-a3fd-65f81791de6d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.814645] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1018bd50-ef9b-4829-af85-f58ef20e91e2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.822022] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76603c67-cd5f-4ee6-8b1b-03964203b432 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.852989] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180591MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1040.853168] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1040.853364] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.938017] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 12435283-c350-4d85-be82-1c85e1ea17be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1040.938097] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.938195] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.938330] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.938451] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.938566] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.938679] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.938791] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.938904] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.939027] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1040.950321] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1040.961334] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1040.971169] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance a0e94a85-8b9b-4394-bbaa-cc21786d3d01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1040.983742] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1040.993906] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2ea0e9df-20df-4d6b-9214-c94e0b8f8468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.005273] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 66db9bd5-4f21-475a-be59-c38a4b45e43e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.015657] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b2749b84-ad2b-4e19-ab8f-4d4b3e157260 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.027034] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b54ba09c-1148-490c-89c4-9dd210249220 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.038659] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 68805c65-f211-4018-a3a7-ea458ef817e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.049847] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 96532039-0ec3-4852-87f7-6bdaa209f5c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.060302] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.070617] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fa47c527-5dc6-4162-b4d8-d8bab3f2b13c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.080521] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1f6709ec-fb9d-490d-beb8-53883fb533e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.090879] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ef7c996b-f62f-4146-b48b-c865c362c12c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.100963] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e9385963-cda4-4778-92d6-4a20722b34bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.112522] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b119eaef-9f58-46e6-9bc4-f47fa88d53c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1041.112756] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1041.112912] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '30', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '8', 'num_os_type_None': '9', 'num_proj_1820e4eb4e7d4d62938f022b7a1c8fc4': '1', 'io_workload': '9', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '2', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1', 'num_proj_5922737c204d481fb40713877b5f46f5': '1', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1', 'num_task_spawning': '1', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '2', 'num_proj_743eeefd02e04e63850742fc5590125f': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1041.424714] env[61570]: DEBUG nova.network.neutron [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.439994] env[61570]: INFO nova.compute.manager [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Took 0.70 seconds to deallocate network for instance. 
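A quick cross-check of the "Final resource view" reported just above (a sketch, not Nova code): assuming used_ram is the 512 MB MEMORY_MB reserve from the provider inventory plus the nine per-instance 128 MB allocations listed earlier, and used_disk/used_vcpus are straight sums of the per-instance DISK_GB/VCPU allocations, the arithmetic matches the logged used_ram=1664MB, used_disk=9GB, used_vcpus=9.

# Rough sanity check of the resource tracker figures above; the mapping of
# used_ram to "reserved + per-instance allocations" is an inference, not Nova code.
per_instance = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}  # allocation shown for each instance above
num_instances = 9        # "Total usable vcpus: 48, total allocated vcpus: 9"
reserved_ram_mb = 512    # 'reserved' for MEMORY_MB in the provider inventory

used_ram_mb = reserved_ram_mb + num_instances * per_instance["MEMORY_MB"]
used_disk_gb = num_instances * per_instance["DISK_GB"]
used_vcpus = num_instances * per_instance["VCPU"]

# Matches the logged view: used_ram=1664MB used_disk=9GB used_vcpus=9
assert (used_ram_mb, used_disk_gb, used_vcpus) == (1664, 9, 9)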
[ 1041.525747] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ef1bd8-2174-4903-a6a5-451928b24b61 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.540863] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771cf710-c63f-43e1-9f91-98bbb6647896 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.576412] env[61570]: INFO nova.scheduler.client.report [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Deleted allocations for instance 12435283-c350-4d85-be82-1c85e1ea17be [ 1041.582153] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb7f836-22d5-4a9c-9ce9-2eb67f133c69 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.592695] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a293954-b505-4e01-be43-f90b581c8269 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.607126] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.609031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2c91c40a-0559-4593-871e-505c8aa75ed2 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "12435283-c350-4d85-be82-1c85e1ea17be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 484.710s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.610106] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "12435283-c350-4d85-be82-1c85e1ea17be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 285.128s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.610393] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Acquiring lock "12435283-c350-4d85-be82-1c85e1ea17be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.610524] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "12435283-c350-4d85-be82-1c85e1ea17be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.610698] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "12435283-c350-4d85-be82-1c85e1ea17be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.612677] env[61570]: INFO nova.compute.manager [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Terminating instance [ 1041.614588] env[61570]: DEBUG nova.compute.manager [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1041.614778] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1041.615329] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f17f686c-6557-4459-841e-325c2ab771a5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.618210] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.628264] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b306f993-a080-4833-9dc6-937b2dff0dc2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.643021] env[61570]: DEBUG nova.compute.manager [None req-1e9ef257-88df-4f81-9763-241e70538856 tempest-AttachInterfacesUnderV243Test-1001287889 tempest-AttachInterfacesUnderV243Test-1001287889-project-member] [instance: c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.643021] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1041.643021] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.789s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.668180] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 12435283-c350-4d85-be82-1c85e1ea17be could not be found. [ 1041.668180] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1041.668180] env[61570]: INFO nova.compute.manager [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1041.668180] env[61570]: DEBUG oslo.service.loopingcall [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.668444] env[61570]: DEBUG nova.compute.manager [-] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1041.672028] env[61570]: DEBUG nova.network.neutron [-] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1041.672028] env[61570]: DEBUG nova.compute.manager [None req-1e9ef257-88df-4f81-9763-241e70538856 tempest-AttachInterfacesUnderV243Test-1001287889 tempest-AttachInterfacesUnderV243Test-1001287889-project-member] [instance: c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.701850] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e9ef257-88df-4f81-9763-241e70538856 tempest-AttachInterfacesUnderV243Test-1001287889 tempest-AttachInterfacesUnderV243Test-1001287889-project-member] Lock "c7085baf-4dfc-4d9b-abcd-02e7fd3c4fa9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.042s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.703463] env[61570]: DEBUG nova.network.neutron [-] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.712223] env[61570]: DEBUG nova.compute.manager [None req-f5d44e55-fff1-42ea-ba13-7fedd2a9133d tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 0dc0f5b5-03fc-4b7c-9715-52c6746c86f3] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.715047] env[61570]: INFO nova.compute.manager [-] [instance: 12435283-c350-4d85-be82-1c85e1ea17be] Took 0.05 seconds to deallocate network for instance. [ 1041.772025] env[61570]: DEBUG nova.compute.manager [None req-f5d44e55-fff1-42ea-ba13-7fedd2a9133d tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 0dc0f5b5-03fc-4b7c-9715-52c6746c86f3] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.793850] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f5d44e55-fff1-42ea-ba13-7fedd2a9133d tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "0dc0f5b5-03fc-4b7c-9715-52c6746c86f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.177s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.816445] env[61570]: DEBUG nova.compute.manager [None req-58511ce7-ca84-4e0b-8b6b-2cd65cfd4483 tempest-ServersNegativeTestJSON-1355265268 tempest-ServersNegativeTestJSON-1355265268-project-member] [instance: e3183113-8fc2-408a-a77b-2ac28473154a] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.825889] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5179c07c-ca33-40a6-ac15-647592099705 tempest-ServersV294TestFqdnHostnames-1520237946 tempest-ServersV294TestFqdnHostnames-1520237946-project-member] Lock "12435283-c350-4d85-be82-1c85e1ea17be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.841098] env[61570]: DEBUG nova.compute.manager [None req-58511ce7-ca84-4e0b-8b6b-2cd65cfd4483 tempest-ServersNegativeTestJSON-1355265268 tempest-ServersNegativeTestJSON-1355265268-project-member] [instance: e3183113-8fc2-408a-a77b-2ac28473154a] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.861737] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58511ce7-ca84-4e0b-8b6b-2cd65cfd4483 tempest-ServersNegativeTestJSON-1355265268 tempest-ServersNegativeTestJSON-1355265268-project-member] Lock "e3183113-8fc2-408a-a77b-2ac28473154a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.366s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.872825] env[61570]: DEBUG nova.compute.manager [None req-c8ab77fe-f962-4721-80bb-1b16d24059ef tempest-InstanceActionsTestJSON-892237482 tempest-InstanceActionsTestJSON-892237482-project-member] [instance: fd9b2275-0d1a-4e49-8e70-93cbc1b3d645] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.896876] env[61570]: DEBUG nova.compute.manager [None req-c8ab77fe-f962-4721-80bb-1b16d24059ef tempest-InstanceActionsTestJSON-892237482 tempest-InstanceActionsTestJSON-892237482-project-member] [instance: fd9b2275-0d1a-4e49-8e70-93cbc1b3d645] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.919253] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c8ab77fe-f962-4721-80bb-1b16d24059ef tempest-InstanceActionsTestJSON-892237482 tempest-InstanceActionsTestJSON-892237482-project-member] Lock "fd9b2275-0d1a-4e49-8e70-93cbc1b3d645" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.370s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.928593] env[61570]: DEBUG nova.compute.manager [None req-4c9e7cee-49f6-4d7e-9623-aa7392b6b0ed tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: acc214aa-35e9-4302-89c7-9248bdda70f8] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.956835] env[61570]: DEBUG nova.compute.manager [None req-4c9e7cee-49f6-4d7e-9623-aa7392b6b0ed tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: acc214aa-35e9-4302-89c7-9248bdda70f8] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.978708] env[61570]: DEBUG oslo_concurrency.lockutils [None req-4c9e7cee-49f6-4d7e-9623-aa7392b6b0ed tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "acc214aa-35e9-4302-89c7-9248bdda70f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.078s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.990243] env[61570]: DEBUG nova.compute.manager [None req-b45199b5-b53d-45af-91a3-57b0146e6186 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] [instance: fc47b67f-b19a-4b9c-a6df-849c2d3c6797] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.014327] env[61570]: DEBUG nova.compute.manager [None req-b45199b5-b53d-45af-91a3-57b0146e6186 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] [instance: fc47b67f-b19a-4b9c-a6df-849c2d3c6797] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1042.037055] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b45199b5-b53d-45af-91a3-57b0146e6186 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Lock "fc47b67f-b19a-4b9c-a6df-849c2d3c6797" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.637s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.048011] env[61570]: DEBUG nova.compute.manager [None req-5f71f08d-b970-46ff-ad19-cfd36fff56aa tempest-ServerActionsV293TestJSON-832661754 tempest-ServerActionsV293TestJSON-832661754-project-member] [instance: 4e1969dc-292e-4322-be26-de7d11c405fa] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.072889] env[61570]: DEBUG nova.compute.manager [None req-5f71f08d-b970-46ff-ad19-cfd36fff56aa tempest-ServerActionsV293TestJSON-832661754 tempest-ServerActionsV293TestJSON-832661754-project-member] [instance: 4e1969dc-292e-4322-be26-de7d11c405fa] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1042.093824] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5f71f08d-b970-46ff-ad19-cfd36fff56aa tempest-ServerActionsV293TestJSON-832661754 tempest-ServerActionsV293TestJSON-832661754-project-member] Lock "4e1969dc-292e-4322-be26-de7d11c405fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.090s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.119506] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1042.182814] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.183082] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.184624] env[61570]: INFO nova.compute.claims [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.560990] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7b4060-43e2-44a0-bc48-acbed9dfdaa4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.569216] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daae33e-ee78-472e-9e8e-748e535c768c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.600225] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c211ae-9645-4fe0-a7e6-0c6bc5f92a49 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.608307] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708b9728-1428-48af-90d8-e8408828f5b6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.621546] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.621945] env[61570]: DEBUG nova.compute.provider_tree [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.631778] env[61570]: DEBUG nova.scheduler.client.report [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.653785] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.471s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.654382] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1042.718553] env[61570]: DEBUG nova.compute.utils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1042.719897] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1042.720100] env[61570]: DEBUG nova.network.neutron [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1042.748729] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1042.754454] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.754454] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.818649] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1042.840358] env[61570]: DEBUG nova.policy [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd87c59eede9c4bf186380f9f37a2caaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21713d88a2d5483f89ae59404d3aa235', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1042.846022] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1042.846022] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1042.846022] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.846220] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1042.846220] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.846220] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1042.846220] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1042.846220] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1042.846368] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1042.846636] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1042.847076] env[61570]: DEBUG nova.virt.hardware [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.848059] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb05bb47-696d-4fff-8fa7-648131a37117 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.859085] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb3d6fd-fb75-408d-8943-d5662b85e4ce {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.237787] env[61570]: DEBUG nova.network.neutron [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Successfully created port: 31f1b2a2-c635-4000-b033-829745ea0fea {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.753188] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.044942] env[61570]: DEBUG nova.network.neutron [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Successfully updated port: 31f1b2a2-c635-4000-b033-829745ea0fea {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.067428] env[61570]: DEBUG nova.compute.manager [req-ac5fe179-dedc-43e6-b52f-e127e5e82d7f 
req-5bb3493e-ec21-4cac-b50e-e26c929da15a service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Received event network-vif-plugged-31f1b2a2-c635-4000-b033-829745ea0fea {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1044.067657] env[61570]: DEBUG oslo_concurrency.lockutils [req-ac5fe179-dedc-43e6-b52f-e127e5e82d7f req-5bb3493e-ec21-4cac-b50e-e26c929da15a service nova] Acquiring lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.067864] env[61570]: DEBUG oslo_concurrency.lockutils [req-ac5fe179-dedc-43e6-b52f-e127e5e82d7f req-5bb3493e-ec21-4cac-b50e-e26c929da15a service nova] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.068046] env[61570]: DEBUG oslo_concurrency.lockutils [req-ac5fe179-dedc-43e6-b52f-e127e5e82d7f req-5bb3493e-ec21-4cac-b50e-e26c929da15a service nova] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.068291] env[61570]: DEBUG nova.compute.manager [req-ac5fe179-dedc-43e6-b52f-e127e5e82d7f req-5bb3493e-ec21-4cac-b50e-e26c929da15a service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] No waiting events found dispatching network-vif-plugged-31f1b2a2-c635-4000-b033-829745ea0fea {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1044.068377] env[61570]: WARNING nova.compute.manager [req-ac5fe179-dedc-43e6-b52f-e127e5e82d7f req-5bb3493e-ec21-4cac-b50e-e26c929da15a service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Received unexpected event network-vif-plugged-31f1b2a2-c635-4000-b033-829745ea0fea for instance with vm_state building and task_state spawning. 
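The entries above show the external-event flow for network-vif-plugged-31f1b2a2: the event arrives while the instance is still building/spawning, no registered waiter is found, and it is logged as unexpected instead of being dispatched. A minimal sketch of that pop-or-warn pattern is below; it is illustrative only (register_waiter, receive_external_event and _waiters are hypothetical names, not Nova's actual implementation).

# Minimal sketch of a pop-or-warn event dispatcher: an event is only delivered
# if a waiter was registered for it; otherwise it is logged as unexpected,
# mirroring "No waiting events found dispatching ..." / "Received unexpected event ...".
import logging

LOG = logging.getLogger(__name__)
_waiters = {}  # hypothetical map: (instance_uuid, event_name) -> callback

def register_waiter(instance_uuid, event_name, callback):
    _waiters[(instance_uuid, event_name)] = callback

def receive_external_event(instance_uuid, event_name):
    callback = _waiters.pop((instance_uuid, event_name), None)
    if callback is None:
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
        return
    callback()

# e.g. receive_external_event("ddab6060-65d2-4ecc-b4ff-b57271af9d9e",
#                             "network-vif-plugged-31f1b2a2-c635-4000-b033-829745ea0fea")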
[ 1044.071676] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.072052] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.072052] env[61570]: DEBUG nova.network.neutron [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1044.149330] env[61570]: DEBUG nova.network.neutron [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1044.158088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.408055] env[61570]: DEBUG nova.network.neutron [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Updating instance_info_cache with network_info: [{"id": "31f1b2a2-c635-4000-b033-829745ea0fea", "address": "fa:16:3e:a2:bb:d9", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31f1b2a2-c6", "ovs_interfaceid": "31f1b2a2-c635-4000-b033-829745ea0fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.423199] env[61570]: 
DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Releasing lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.423405] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Instance network_info: |[{"id": "31f1b2a2-c635-4000-b033-829745ea0fea", "address": "fa:16:3e:a2:bb:d9", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31f1b2a2-c6", "ovs_interfaceid": "31f1b2a2-c635-4000-b033-829745ea0fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1044.424464] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:bb:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31f1b2a2-c635-4000-b033-829745ea0fea', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.434401] env[61570]: DEBUG oslo.service.loopingcall [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.434877] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1044.435144] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c610547e-9f33-486e-9f2e-8a55e46fc07b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.457264] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.457264] env[61570]: value = "task-4891343" [ 1044.457264] env[61570]: _type = "Task" [ 1044.457264] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.465535] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891343, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.748277] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.969016] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891343, 'name': CreateVM_Task, 'duration_secs': 0.298577} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.969364] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1044.969884] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.970124] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.970463] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.970710] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-caee22d3-eaaf-4bdd-a5bd-cab6c71dece4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.975568] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 
tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 1044.975568] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5247f89a-9dbe-74de-b84d-2d3ab758cc57" [ 1044.975568] env[61570]: _type = "Task" [ 1044.975568] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.984049] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5247f89a-9dbe-74de-b84d-2d3ab758cc57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.487012] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.487284] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.487526] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.094063] env[61570]: DEBUG nova.compute.manager [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Received event network-changed-31f1b2a2-c635-4000-b033-829745ea0fea {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1046.094314] env[61570]: DEBUG nova.compute.manager [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Refreshing instance network info cache due to event network-changed-31f1b2a2-c635-4000-b033-829745ea0fea. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1046.094518] env[61570]: DEBUG oslo_concurrency.lockutils [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] Acquiring lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.094649] env[61570]: DEBUG oslo_concurrency.lockutils [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] Acquired lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.094805] env[61570]: DEBUG nova.network.neutron [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Refreshing network info cache for port 31f1b2a2-c635-4000-b033-829745ea0fea {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.361529] env[61570]: DEBUG nova.network.neutron [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Updated VIF entry in instance network info cache for port 31f1b2a2-c635-4000-b033-829745ea0fea. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1046.361883] env[61570]: DEBUG nova.network.neutron [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Updating instance_info_cache with network_info: [{"id": "31f1b2a2-c635-4000-b033-829745ea0fea", "address": "fa:16:3e:a2:bb:d9", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31f1b2a2-c6", "ovs_interfaceid": "31f1b2a2-c635-4000-b033-829745ea0fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.371901] env[61570]: DEBUG oslo_concurrency.lockutils [req-8085dfb8-44d4-4f58-891d-9cc6f7251928 req-d8dbf9dc-88be-4428-ac72-da36c72a10d1 service nova] Releasing lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.100128] env[61570]: WARNING oslo_vmware.rw_handles [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1089.100128] env[61570]: ERROR oslo_vmware.rw_handles [ 1089.100794] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1089.103153] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1089.103422] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Copying Virtual Disk [datastore2] vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/15c0120c-9abc-47df-bc1c-8b077fe89a29/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1089.103757] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76e1986a-d647-4034-93b8-f10656acef1e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.112250] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Waiting for the task: (returnval){ [ 1089.112250] env[61570]: value = "task-4891344" [ 1089.112250] env[61570]: _type = "Task" [ 1089.112250] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.121306] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Task: {'id': task-4891344, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.623252] env[61570]: DEBUG oslo_vmware.exceptions [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1089.623648] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.624416] env[61570]: ERROR nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1089.624416] env[61570]: Faults: ['InvalidArgument'] [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Traceback (most recent call last): [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] yield resources [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self.driver.spawn(context, instance, image_meta, [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self._fetch_image_if_missing(context, vi) [ 1089.624416] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] image_cache(vi, tmp_image_ds_loc) [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] vm_util.copy_virtual_disk( [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] session._wait_for_task(vmdk_copy_task) [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] return self.wait_for_task(task_ref) [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] return evt.wait() [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] result = hub.switch() [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1089.624897] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] return self.greenlet.switch() [ 1089.625460] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1089.625460] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self.f(*self.args, **self.kw) [ 1089.625460] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1089.625460] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] raise exceptions.translate_fault(task_info.error) [ 1089.625460] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1089.625460] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Faults: ['InvalidArgument'] [ 1089.625460] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] [ 1089.625460] env[61570]: INFO nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Terminating instance [ 1089.627036] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.627036] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.627036] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b63ff50-613a-47ac-a52e-b03519dbebdb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.629636] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1089.629868] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1089.630615] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd02c190-794b-4fe5-b2d5-aa21c4927c31 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.638533] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1089.638787] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac9f84bf-34a6-4bec-baa7-255a46213d8b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.641318] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.641490] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1089.642491] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5efd670b-8835-421a-b193-622f76254773 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.648582] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 1089.648582] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52afa4f1-6ba2-619e-021c-5a59c83a5f01" [ 1089.648582] env[61570]: _type = "Task" [ 1089.648582] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.657203] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52afa4f1-6ba2-619e-021c-5a59c83a5f01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.718161] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1089.718369] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1089.718477] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Deleting the datastore file [datastore2] e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.718768] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8b09d47-e24c-4987-b801-f0d7814514e4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.725912] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Waiting for the task: (returnval){ [ 1089.725912] env[61570]: value = "task-4891346" [ 1089.725912] env[61570]: _type = "Task" [ 1089.725912] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.735815] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Task: {'id': task-4891346, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.159707] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1090.159707] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating directory with path [datastore2] vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1090.159707] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e84e7e9b-284e-4fc7-9bc5-1af5be591810 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.173057] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Created directory with path [datastore2] vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1090.173167] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Fetch image to [datastore2] vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1090.173300] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1090.174534] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82d365f-013a-4151-9826-9ae0a62bfd40 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.183039] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3510f2-f08b-45be-83a4-bdf492b1bc5a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.192678] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91826f0d-f5f0-4399-809a-6cbcb0a0076d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.233309] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702cab74-4ad4-42e1-8237-47e89d6e0221 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.242915] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-50a3986e-fff3-4407-8827-42c8c953c672 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.244910] env[61570]: DEBUG oslo_vmware.api [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Task: {'id': task-4891346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080659} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.245182] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.245364] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1090.245539] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1090.245733] env[61570]: INFO nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1090.248110] env[61570]: DEBUG nova.compute.claims [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1090.248400] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.248527] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.272694] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1090.334870] env[61570]: DEBUG oslo_vmware.rw_handles [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1090.393434] env[61570]: DEBUG oslo_vmware.rw_handles [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1090.393632] env[61570]: DEBUG oslo_vmware.rw_handles [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1090.664752] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05bce17-1438-49b0-8277-50bc9fb1df23 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.672767] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067d2009-b01c-4789-9838-5a48362ba1db {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.706424] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a83df10-0ded-44ef-914a-75ab89fa2a7d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.715244] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b965da-862b-444b-aeaa-1fe17654d48f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.730156] env[61570]: DEBUG nova.compute.provider_tree [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.739989] env[61570]: DEBUG nova.scheduler.client.report [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1090.755196] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.506s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.755891] env[61570]: ERROR nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1090.755891] env[61570]: Faults: ['InvalidArgument'] [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Traceback (most recent call last): [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: 
e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self.driver.spawn(context, instance, image_meta, [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self._fetch_image_if_missing(context, vi) [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] image_cache(vi, tmp_image_ds_loc) [ 1090.755891] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] vm_util.copy_virtual_disk( [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] session._wait_for_task(vmdk_copy_task) [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] return self.wait_for_task(task_ref) [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] return evt.wait() [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] result = hub.switch() [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] return self.greenlet.switch() [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1090.756502] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] self.f(*self.args, **self.kw) [ 1090.756925] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1090.756925] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] raise exceptions.translate_fault(task_info.error) [ 1090.756925] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1090.756925] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Faults: ['InvalidArgument'] [ 1090.756925] env[61570]: ERROR nova.compute.manager [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] [ 1090.756925] env[61570]: DEBUG nova.compute.utils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1090.759027] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Build of instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 was re-scheduled: A specified parameter was not correct: fileType [ 1090.759027] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1090.759413] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1090.759590] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1090.759765] env[61570]: DEBUG nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1090.759932] env[61570]: DEBUG nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.211829] env[61570]: DEBUG nova.network.neutron [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.223702] env[61570]: INFO nova.compute.manager [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Took 0.46 seconds to deallocate network for instance. [ 1091.329808] env[61570]: INFO nova.scheduler.client.report [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Deleted allocations for instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 [ 1091.362628] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a369b67a-281a-42db-9e46-b8a63c949edb tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 520.850s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.363947] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 322.517s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.364196] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Acquiring lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.364407] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.364664] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.367331] env[61570]: INFO nova.compute.manager [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Terminating instance [ 1091.369354] env[61570]: DEBUG nova.compute.manager [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1091.372072] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1091.372072] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44a90c19-5e60-4daf-8033-2c3b155b1617 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.379829] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1554b6-60b2-4df1-97f4-870bbacb4077 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.391401] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1091.417712] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e95d1ce4-26b0-496e-8a74-fc0a8ab00d96 could not be found. [ 1091.417927] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1091.418115] env[61570]: INFO nova.compute.manager [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1091.418368] env[61570]: DEBUG oslo.service.loopingcall [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.418630] env[61570]: DEBUG nova.compute.manager [-] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1091.418716] env[61570]: DEBUG nova.network.neutron [-] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.447347] env[61570]: DEBUG nova.network.neutron [-] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.460254] env[61570]: INFO nova.compute.manager [-] [instance: e95d1ce4-26b0-496e-8a74-fc0a8ab00d96] Took 0.04 seconds to deallocate network for instance. [ 1091.476264] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.476636] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.478177] env[61570]: INFO nova.compute.claims [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.562116] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ce2ad0af-e615-455b-b694-f80c2b220fab tempest-ServerTagsTestJSON-245361709 tempest-ServerTagsTestJSON-245361709-project-member] Lock "e95d1ce4-26b0-496e-8a74-fc0a8ab00d96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.823445] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a6e9de-c3a4-402a-88c9-4e4cb13011ec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.831287] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a825d8f-da38-4d6b-b045-e4dc7b18c043 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.861111] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-c54ac33c-33e7-416f-bd37-d2702fc1dbcc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.868867] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3504f1b-7156-425c-9ca1-f76fd3335d5a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.883203] env[61570]: DEBUG nova.compute.provider_tree [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.894046] env[61570]: DEBUG nova.scheduler.client.report [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1091.907602] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.431s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.908098] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1091.940712] env[61570]: DEBUG nova.compute.utils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1091.942281] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Allocating IP information in the background. 
{{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1091.942365] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1091.952151] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1092.019443] env[61570]: DEBUG nova.policy [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab2f35e4a6b744db8470656aed0cc984', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34fecdc3cc7f47fdba241831e5f27f53', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1092.022936] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1092.050857] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1092.051205] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1092.051398] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.051631] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1092.051806] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.051977] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1092.052232] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1092.052422] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1092.052638] env[61570]: DEBUG 
nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1092.052859] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1092.053077] env[61570]: DEBUG nova.virt.hardware [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1092.054331] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eacc7a2-c732-4529-9bd5-7fa86414bb58 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.064029] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bfec75-1557-479d-9082-f9f7a3b01d10 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.464139] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Successfully created port: 5408bc82-270f-4a39-a81e-282e0718c52f {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1093.414983] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Successfully updated port: 5408bc82-270f-4a39-a81e-282e0718c52f {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.421516] env[61570]: DEBUG nova.compute.manager [req-fcd26347-59cf-44b7-9722-12553bb4b85c req-a6917050-c7ac-4467-8865-14543faa0d62 service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Received event network-vif-plugged-5408bc82-270f-4a39-a81e-282e0718c52f {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1093.421776] env[61570]: DEBUG oslo_concurrency.lockutils [req-fcd26347-59cf-44b7-9722-12553bb4b85c req-a6917050-c7ac-4467-8865-14543faa0d62 service nova] Acquiring lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.421988] env[61570]: DEBUG oslo_concurrency.lockutils [req-fcd26347-59cf-44b7-9722-12553bb4b85c req-a6917050-c7ac-4467-8865-14543faa0d62 service nova] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.422178] env[61570]: DEBUG 
oslo_concurrency.lockutils [req-fcd26347-59cf-44b7-9722-12553bb4b85c req-a6917050-c7ac-4467-8865-14543faa0d62 service nova] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.422348] env[61570]: DEBUG nova.compute.manager [req-fcd26347-59cf-44b7-9722-12553bb4b85c req-a6917050-c7ac-4467-8865-14543faa0d62 service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] No waiting events found dispatching network-vif-plugged-5408bc82-270f-4a39-a81e-282e0718c52f {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1093.422539] env[61570]: WARNING nova.compute.manager [req-fcd26347-59cf-44b7-9722-12553bb4b85c req-a6917050-c7ac-4467-8865-14543faa0d62 service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Received unexpected event network-vif-plugged-5408bc82-270f-4a39-a81e-282e0718c52f for instance with vm_state building and task_state spawning. [ 1093.439999] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.439999] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.440210] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1093.513041] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1093.578811] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.791144] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Updating instance_info_cache with network_info: [{"id": "5408bc82-270f-4a39-a81e-282e0718c52f", "address": "fa:16:3e:2d:f0:f6", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5408bc82-27", "ovs_interfaceid": "5408bc82-270f-4a39-a81e-282e0718c52f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.810899] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.811295] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance network_info: |[{"id": "5408bc82-270f-4a39-a81e-282e0718c52f", "address": "fa:16:3e:2d:f0:f6", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5408bc82-27", "ovs_interfaceid": "5408bc82-270f-4a39-a81e-282e0718c52f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1093.811643] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:f0:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5408bc82-270f-4a39-a81e-282e0718c52f', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1093.819442] env[61570]: DEBUG oslo.service.loopingcall [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.820106] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1093.820307] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dc7dd17-61d6-46f3-81f1-8bd4f3fc190c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.840572] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.840572] env[61570]: value = "task-4891347" [ 1093.840572] env[61570]: _type = "Task" [ 1093.840572] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.850195] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891347, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.352348] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891347, 'name': CreateVM_Task, 'duration_secs': 0.279772} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.353086] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1094.353554] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.353714] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.354068] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1094.355104] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-873715d6-343f-4bb8-b83a-3e31f5ac6f0c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.360488] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 1094.360488] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5279f186-b8c5-28f4-4cbd-4baf6d15ca67" [ 1094.360488] env[61570]: _type = "Task" [ 1094.360488] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.369634] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5279f186-b8c5-28f4-4cbd-4baf6d15ca67, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.872135] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.872521] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1094.872660] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.572320] env[61570]: DEBUG nova.compute.manager [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Received event network-changed-5408bc82-270f-4a39-a81e-282e0718c52f {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1095.572571] env[61570]: DEBUG nova.compute.manager [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Refreshing instance network info cache due to event network-changed-5408bc82-270f-4a39-a81e-282e0718c52f. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1095.572966] env[61570]: DEBUG oslo_concurrency.lockutils [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] Acquiring lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.573392] env[61570]: DEBUG oslo_concurrency.lockutils [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] Acquired lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.573494] env[61570]: DEBUG nova.network.neutron [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Refreshing network info cache for port 5408bc82-270f-4a39-a81e-282e0718c52f {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1095.894502] env[61570]: DEBUG nova.network.neutron [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Updated VIF entry in instance network info cache for port 5408bc82-270f-4a39-a81e-282e0718c52f. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1095.894884] env[61570]: DEBUG nova.network.neutron [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Updating instance_info_cache with network_info: [{"id": "5408bc82-270f-4a39-a81e-282e0718c52f", "address": "fa:16:3e:2d:f0:f6", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5408bc82-27", "ovs_interfaceid": "5408bc82-270f-4a39-a81e-282e0718c52f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.904038] env[61570]: DEBUG oslo_concurrency.lockutils [req-6a9aebca-910e-4b06-9a5b-33aedb4190b4 req-29fe031e-577a-46f8-9cfd-4f3b68513a6e service nova] Releasing lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.753722] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.754091] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1097.771664] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] There are 0 instances to clean {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1100.771183] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.771536] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1100.771576] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1100.796852] env[61570]: DEBUG nova.compute.manager [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797061] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797214] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797346] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797469] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797590] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797708] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797826] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.797942] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.798077] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1100.798239] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1100.798768] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.798914] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1100.799091] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1100.809997] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.810244] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.810413] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.810568] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1100.811639] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6716e7f8-e688-4d96-87e5-215c0537146e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.820879] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61417976-1af4-4612-a5d7-8a93b1d42c3a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.835621] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d467174-60c9-4794-aa74-f73aafef461c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.842855] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faa2d3b-0117-4cc9-9fd5-d0d75b535327 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.873924] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180587MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1100.874084] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.874274] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.027309] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.027528] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.027661] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.027786] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.027905] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.028106] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.028235] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.028353] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.028493] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.028621] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1101.042153] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance a0e94a85-8b9b-4394-bbaa-cc21786d3d01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.056940] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.069346] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2ea0e9df-20df-4d6b-9214-c94e0b8f8468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.081425] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 66db9bd5-4f21-475a-be59-c38a4b45e43e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.094710] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b2749b84-ad2b-4e19-ab8f-4d4b3e157260 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.106459] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b54ba09c-1148-490c-89c4-9dd210249220 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.118691] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 68805c65-f211-4018-a3a7-ea458ef817e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.131107] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 96532039-0ec3-4852-87f7-6bdaa209f5c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.142025] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.155664] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fa47c527-5dc6-4162-b4d8-d8bab3f2b13c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.166345] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1f6709ec-fb9d-490d-beb8-53883fb533e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.177877] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ef7c996b-f62f-4146-b48b-c865c362c12c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.187826] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e9385963-cda4-4778-92d6-4a20722b34bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.198201] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b119eaef-9f58-46e6-9bc4-f47fa88d53c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1101.198435] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1101.198597] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '39', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '2', 'io_workload': '10', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1', 'num_proj_5922737c204d481fb40713877b5f46f5': '1', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1', 'num_task_spawning': '1', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '2', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1101.501722] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce988092-eea5-4eba-9265-7a8f51e58cb6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.509669] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0555f02b-eda0-4958-a1f5-6a77c6469432 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.543729] env[61570]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1e5e3d-ef15-49f7-82f0-08b60efca340 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.552667] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edc0877-9594-49eb-9963-de3f9186f0d1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.566659] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.576025] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.592947] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1101.593208] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.719s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.753409] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.761511] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.753506] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.753752] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.753908] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.753677] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.748805] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.753610] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.753867] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances with incomplete migration {{(pid=61570) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1108.758587] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.165627] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_power_states {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.188572] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Getting list of instances from cluster (obj){ [ 1121.188572] env[61570]: value = "domain-c8" [ 1121.188572] env[61570]: _type = "ClusterComputeResource" [ 1121.188572] env[61570]: } {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1121.191294] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbc67f9-013a-4dde-b0b3-190504a45363 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.209915] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Got total of 10 instances {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1121.210117] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid aa2e5125-24fb-4476-a585-df838c8cf4d2 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.210323] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 0a37f623-f757-4f67-a796-a8e17cfb9496 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.210481] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 1435e51b-58b8-406f-9def-f9e6e7bffd8a {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.210635] env[61570]: DEBUG nova.compute.manager [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid c6acb8ba-7877-44c9-a78b-f15fc6d47b28 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.210788] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid e4f4573c-040a-49d6-ba20-e051a265b3e4 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.210935] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid df50c085-3eee-44c2-8d14-263f3bf49b2d {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.211098] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 0b77e196-4948-4a76-8e87-75e9b1e5df55 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.211257] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 8ef1d751-e809-46e0-b98f-ac90ab076889 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.211404] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid ddab6060-65d2-4ecc-b4ff-b57271af9d9e {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.211549] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 60ecef78-bcc2-42ab-bdba-83e8009dbe98 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1121.211901] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.212153] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "0a37f623-f757-4f67-a796-a8e17cfb9496" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.212366] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.212644] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.213068] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.213068] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.213240] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.213430] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "8ef1d751-e809-46e0-b98f-ac90ab076889" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.213619] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.214253] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.932418] env[61570]: WARNING oslo_vmware.rw_handles [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1138.932418] env[61570]: ERROR oslo_vmware.rw_handles [ 1138.933422] env[61570]: DEBUG 
nova.virt.vmwareapi.images [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1138.935247] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1138.935484] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Copying Virtual Disk [datastore2] vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/6521b019-1fcb-4df2-98e7-8361295af23e/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1138.935763] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39a840ab-b9f4-4100-8fdb-fc68c3799341 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.944119] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 1138.944119] env[61570]: value = "task-4891348" [ 1138.944119] env[61570]: _type = "Task" [ 1138.944119] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.953171] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': task-4891348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.454814] env[61570]: DEBUG oslo_vmware.exceptions [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1139.455127] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.455726] env[61570]: ERROR nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1139.455726] env[61570]: Faults: ['InvalidArgument'] [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Traceback (most recent call last): [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] yield resources [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self.driver.spawn(context, instance, image_meta, [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self._fetch_image_if_missing(context, vi) [ 1139.455726] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] image_cache(vi, tmp_image_ds_loc) [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] vm_util.copy_virtual_disk( [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] session._wait_for_task(vmdk_copy_task) [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] return self.wait_for_task(task_ref) [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] return evt.wait() [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] result = hub.switch() [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1139.456177] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] return self.greenlet.switch() [ 1139.456520] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1139.456520] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self.f(*self.args, **self.kw) [ 1139.456520] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1139.456520] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] raise exceptions.translate_fault(task_info.error) [ 1139.456520] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1139.456520] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Faults: ['InvalidArgument'] [ 1139.456520] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] [ 1139.456520] env[61570]: INFO nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Terminating instance [ 1139.457683] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.457863] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.458165] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2500f3f-a56a-4a08-98d3-1cf933693ed7 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.460351] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1139.460540] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1139.461274] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44a3dfe-e381-4368-83ff-1756102e299c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.468391] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1139.468630] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82227d5d-8cd9-47e5-b723-2d9d81e9d8e4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.470850] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.471031] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1139.471985] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8250a954-93ce-4936-973f-d3fd30dc4960 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.477694] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 1139.477694] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52187dcd-191a-1140-ad89-42d67a092c15" [ 1139.477694] env[61570]: _type = "Task" [ 1139.477694] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.485350] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52187dcd-191a-1140-ad89-42d67a092c15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.969599] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1139.969599] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1139.969599] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Deleting the datastore file [datastore2] aa2e5125-24fb-4476-a585-df838c8cf4d2 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1139.969599] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edc8f5b0-7f40-45ca-9c11-2d62adfb83bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.977089] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 1139.977089] env[61570]: value = "task-4891350" [ 1139.977089] env[61570]: _type = "Task" [ 1139.977089] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.990034] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': task-4891350, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.992663] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1139.992914] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating directory with path [datastore2] vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.993194] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c02099e2-ad42-4a05-a17a-f4967f1b2283 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.015050] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Created directory with path [datastore2] vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.015195] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Fetch image to [datastore2] vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1140.015569] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1140.016695] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb74605b-e080-4ea5-9268-159c7d3fa697 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.025183] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23de2fa-00c2-41dc-b652-680816b2317d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.034994] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a347a1a-2d35-4d4f-b1a1-8d34980b8d12 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.070935] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff88a10c-2670-4be4-ba1b-9c01631dc979 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.077988] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5925ca34-3e55-413f-80fe-8070343e8b51 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.105304] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1140.161369] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1140.225975] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1140.226228] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1140.488079] env[61570]: DEBUG oslo_vmware.api [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': task-4891350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09261} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.488383] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1140.488570] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1140.488741] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1140.488914] env[61570]: INFO nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Took 1.03 seconds to destroy the instance on the hypervisor. [ 1140.491369] env[61570]: DEBUG nova.compute.claims [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1140.491547] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.491755] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.848029] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ba9145-4912-4445-9e16-3bd0f52e812a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.856450] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36df9c72-478c-49b5-98d7-8f361f151b45 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.888028] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e676cec-59a8-4656-9093-c95922877883 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.896245] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-275d071e-34cb-4923-a1ed-3233a1e5c102 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.910249] env[61570]: DEBUG nova.compute.provider_tree [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.921018] env[61570]: DEBUG nova.scheduler.client.report [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1140.940812] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.449s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.941378] env[61570]: ERROR nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1140.941378] env[61570]: Faults: ['InvalidArgument'] [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Traceback (most recent call last): [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self.driver.spawn(context, instance, image_meta, [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self._fetch_image_if_missing(context, vi) [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: 
aa2e5125-24fb-4476-a585-df838c8cf4d2] image_cache(vi, tmp_image_ds_loc) [ 1140.941378] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] vm_util.copy_virtual_disk( [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] session._wait_for_task(vmdk_copy_task) [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] return self.wait_for_task(task_ref) [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] return evt.wait() [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] result = hub.switch() [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] return self.greenlet.switch() [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1140.941740] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] self.f(*self.args, **self.kw) [ 1140.942076] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1140.942076] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] raise exceptions.translate_fault(task_info.error) [ 1140.942076] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1140.942076] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Faults: ['InvalidArgument'] [ 1140.942076] env[61570]: ERROR nova.compute.manager [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] [ 1140.942231] env[61570]: DEBUG nova.compute.utils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1140.944198] env[61570]: DEBUG 
nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Build of instance aa2e5125-24fb-4476-a585-df838c8cf4d2 was re-scheduled: A specified parameter was not correct: fileType [ 1140.944198] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1140.944619] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1140.944746] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1140.944915] env[61570]: DEBUG nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1140.945092] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1141.356844] env[61570]: DEBUG nova.network.neutron [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.376110] env[61570]: INFO nova.compute.manager [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Took 0.43 seconds to deallocate network for instance. 
[ 1141.513015] env[61570]: INFO nova.scheduler.client.report [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Deleted allocations for instance aa2e5125-24fb-4476-a585-df838c8cf4d2 [ 1141.534983] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26d96d59-f827-405b-9bc7-1e7f4e2879d2 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 568.134s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.536370] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 171.994s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.536486] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "aa2e5125-24fb-4476-a585-df838c8cf4d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.536827] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.536943] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.539024] env[61570]: INFO nova.compute.manager [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Terminating instance [ 1141.540848] env[61570]: DEBUG nova.compute.manager [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1141.541093] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1141.541572] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be58ab9c-5363-437c-9dbb-f6269c689057 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.550656] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e79920-a6ab-44af-a167-da732701d037 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.561929] env[61570]: DEBUG nova.compute.manager [None req-53ac5f31-d284-40f7-b864-f11fe21d7650 tempest-ServerShowV254Test-616399010 tempest-ServerShowV254Test-616399010-project-member] [instance: a0e94a85-8b9b-4394-bbaa-cc21786d3d01] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1141.588186] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa2e5125-24fb-4476-a585-df838c8cf4d2 could not be found. [ 1141.588407] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1141.588624] env[61570]: INFO nova.compute.manager [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1141.588885] env[61570]: DEBUG oslo.service.loopingcall [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1141.589134] env[61570]: DEBUG nova.compute.manager [-] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1141.589228] env[61570]: DEBUG nova.network.neutron [-] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1141.593979] env[61570]: DEBUG nova.compute.manager [None req-53ac5f31-d284-40f7-b864-f11fe21d7650 tempest-ServerShowV254Test-616399010 tempest-ServerShowV254Test-616399010-project-member] [instance: a0e94a85-8b9b-4394-bbaa-cc21786d3d01] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1141.617648] env[61570]: DEBUG oslo_concurrency.lockutils [None req-53ac5f31-d284-40f7-b864-f11fe21d7650 tempest-ServerShowV254Test-616399010 tempest-ServerShowV254Test-616399010-project-member] Lock "a0e94a85-8b9b-4394-bbaa-cc21786d3d01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.619129] env[61570]: DEBUG nova.network.neutron [-] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.627893] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1141.630584] env[61570]: INFO nova.compute.manager [-] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] Took 0.04 seconds to deallocate network for instance. [ 1141.690309] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.690628] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.692431] env[61570]: INFO nova.compute.claims [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1142.420529] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b9a6e994-75dc-47e3-9a95-3117bff578df tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.884s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.421383] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 21.209s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.421575] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: aa2e5125-24fb-4476-a585-df838c8cf4d2] During sync_power_state the instance has a pending task 
(deleting). Skip. [ 1142.421746] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "aa2e5125-24fb-4476-a585-df838c8cf4d2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.754457] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7d3c93-14e9-4293-8b79-caa825acc6c9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.762739] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c87509-afc7-4e99-9681-c1615e999774 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.792788] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02654e1c-c8c9-42bf-81dd-da4c37140f2d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.800739] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37734625-1200-48a2-a486-ab80b2a9154e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.814426] env[61570]: DEBUG nova.compute.provider_tree [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.824644] env[61570]: DEBUG nova.scheduler.client.report [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1142.840189] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.149s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.840688] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1142.878954] env[61570]: DEBUG nova.compute.utils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1142.880217] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1142.880394] env[61570]: DEBUG nova.network.neutron [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1142.892639] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1142.960613] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1142.964426] env[61570]: DEBUG nova.policy [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f64b6978ae10491da72d3f00d9cf0496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41f716cba1d94cf28a341cc027112585', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1143.005559] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:51:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='7e3e563c-4237-4968-bafa-dc37462c92ee',id=27,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1076062376',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1143.005870] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1143.006081] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1143.006313] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1143.006492] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1143.006642] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1143.006853] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1143.007015] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1143.007190] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1143.007351] env[61570]: DEBUG nova.virt.hardware [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1143.007524] env[61570]: DEBUG nova.virt.hardware [None 
req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1143.008507] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e40b05-aaf3-4b2c-8f2f-9f2ccffd2fcf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.017937] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e316b5-dcaf-4df3-83f3-a220d7d28fd0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.453687] env[61570]: DEBUG nova.network.neutron [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Successfully created port: ff19378c-8f8f-48ef-8f49-bcce664584e7 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1144.175310] env[61570]: DEBUG nova.compute.manager [req-17b27686-911a-49b9-aa50-e1925ac6b257 req-99d14f18-8e65-4bac-8aed-4a270b63298b service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Received event network-vif-plugged-ff19378c-8f8f-48ef-8f49-bcce664584e7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1144.175310] env[61570]: DEBUG oslo_concurrency.lockutils [req-17b27686-911a-49b9-aa50-e1925ac6b257 req-99d14f18-8e65-4bac-8aed-4a270b63298b service nova] Acquiring lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.175310] env[61570]: DEBUG oslo_concurrency.lockutils [req-17b27686-911a-49b9-aa50-e1925ac6b257 req-99d14f18-8e65-4bac-8aed-4a270b63298b service nova] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.175310] env[61570]: DEBUG oslo_concurrency.lockutils [req-17b27686-911a-49b9-aa50-e1925ac6b257 req-99d14f18-8e65-4bac-8aed-4a270b63298b service nova] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.178318] env[61570]: DEBUG nova.compute.manager [req-17b27686-911a-49b9-aa50-e1925ac6b257 req-99d14f18-8e65-4bac-8aed-4a270b63298b service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] No waiting events found dispatching network-vif-plugged-ff19378c-8f8f-48ef-8f49-bcce664584e7 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1144.178575] env[61570]: WARNING nova.compute.manager [req-17b27686-911a-49b9-aa50-e1925ac6b257 req-99d14f18-8e65-4bac-8aed-4a270b63298b service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Received unexpected event network-vif-plugged-ff19378c-8f8f-48ef-8f49-bcce664584e7 for instance with vm_state building and task_state spawning. 
[ 1144.317428] env[61570]: DEBUG nova.network.neutron [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Successfully updated port: ff19378c-8f8f-48ef-8f49-bcce664584e7 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1144.342611] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "refresh_cache-9cf357dd-3b71-4c76-8feb-04b9145dd4f4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.342611] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired lock "refresh_cache-9cf357dd-3b71-4c76-8feb-04b9145dd4f4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.342611] env[61570]: DEBUG nova.network.neutron [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1144.398338] env[61570]: DEBUG nova.network.neutron [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1144.704681] env[61570]: DEBUG nova.network.neutron [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Updating instance_info_cache with network_info: [{"id": "ff19378c-8f8f-48ef-8f49-bcce664584e7", "address": "fa:16:3e:9e:32:88", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.136", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff19378c-8f", "ovs_interfaceid": "ff19378c-8f8f-48ef-8f49-bcce664584e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.722280] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 
tempest-MigrationsAdminTest-2128029456-project-member] Releasing lock "refresh_cache-9cf357dd-3b71-4c76-8feb-04b9145dd4f4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.722599] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Instance network_info: |[{"id": "ff19378c-8f8f-48ef-8f49-bcce664584e7", "address": "fa:16:3e:9e:32:88", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.136", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff19378c-8f", "ovs_interfaceid": "ff19378c-8f8f-48ef-8f49-bcce664584e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1144.723008] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:32:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff19378c-8f8f-48ef-8f49-bcce664584e7', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1144.730441] env[61570]: DEBUG oslo.service.loopingcall [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.730959] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1144.731204] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa4b813b-0855-4321-b051-8e3d72348be3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.752152] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1144.752152] env[61570]: value = "task-4891351" [ 1144.752152] env[61570]: _type = "Task" [ 1144.752152] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.761134] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891351, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.262623] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891351, 'name': CreateVM_Task, 'duration_secs': 0.305893} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.262825] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1145.263508] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.263694] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.263990] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1145.264561] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d967fdb-1992-48ca-b08c-125944a1f518 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.269723] env[61570]: DEBUG oslo_vmware.api [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for the task: (returnval){ [ 1145.269723] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5227fd48-560e-c0b5-8dec-a12114811e9f" [ 1145.269723] env[61570]: _type = "Task" [ 1145.269723] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.277961] env[61570]: DEBUG oslo_vmware.api [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5227fd48-560e-c0b5-8dec-a12114811e9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.781306] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.781306] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1145.781306] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.209340] env[61570]: DEBUG nova.compute.manager [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Received event network-changed-ff19378c-8f8f-48ef-8f49-bcce664584e7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1146.209531] env[61570]: DEBUG nova.compute.manager [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Refreshing instance network info cache due to event network-changed-ff19378c-8f8f-48ef-8f49-bcce664584e7. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1146.209745] env[61570]: DEBUG oslo_concurrency.lockutils [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] Acquiring lock "refresh_cache-9cf357dd-3b71-4c76-8feb-04b9145dd4f4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.209890] env[61570]: DEBUG oslo_concurrency.lockutils [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] Acquired lock "refresh_cache-9cf357dd-3b71-4c76-8feb-04b9145dd4f4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.210054] env[61570]: DEBUG nova.network.neutron [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Refreshing network info cache for port ff19378c-8f8f-48ef-8f49-bcce664584e7 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1146.668570] env[61570]: DEBUG nova.network.neutron [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Updated VIF entry in instance network info cache for port ff19378c-8f8f-48ef-8f49-bcce664584e7. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1146.668941] env[61570]: DEBUG nova.network.neutron [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Updating instance_info_cache with network_info: [{"id": "ff19378c-8f8f-48ef-8f49-bcce664584e7", "address": "fa:16:3e:9e:32:88", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.136", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff19378c-8f", "ovs_interfaceid": "ff19378c-8f8f-48ef-8f49-bcce664584e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.684837] env[61570]: DEBUG oslo_concurrency.lockutils [req-45fd1f28-18ae-4f95-ab62-e3a16afc3026 req-47cad667-8308-4c3a-b301-c41a7db9a574 service nova] Releasing lock "refresh_cache-9cf357dd-3b71-4c76-8feb-04b9145dd4f4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.650774] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.851959] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "db38d263-aa3d-46b1-a13d-1469155fad84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.852192] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "db38d263-aa3d-46b1-a13d-1469155fad84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.489603] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4" 
by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.753225] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.753500] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1160.753597] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.764463] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.764701] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.764866] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.765030] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1160.768887] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03161704-c931-4e52-88a4-8d30f76992b5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.778259] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064ac2d3-7fb1-494a-beda-d5314af00498 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.797931] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbcc6c4-762c-4f70-a026-a038efc143c1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.806180] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db438da2-ea9a-466d-9811-34b12d9ba25d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.847087] env[61570]: DEBUG nova.compute.resource_tracker 
[None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180597MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1160.847285] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.847464] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.943994] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.944174] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.944298] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.944417] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.944531] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.944647] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.944759] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.944869] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.945687] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.945687] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1160.962087] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b2749b84-ad2b-4e19-ab8f-4d4b3e157260 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1160.983989] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b54ba09c-1148-490c-89c4-9dd210249220 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1160.998755] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 68805c65-f211-4018-a3a7-ea458ef817e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.011285] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 96532039-0ec3-4852-87f7-6bdaa209f5c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.029848] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.042048] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fa47c527-5dc6-4162-b4d8-d8bab3f2b13c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.058723] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1f6709ec-fb9d-490d-beb8-53883fb533e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.071939] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ef7c996b-f62f-4146-b48b-c865c362c12c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.086705] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e9385963-cda4-4778-92d6-4a20722b34bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.102390] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b119eaef-9f58-46e6-9bc4-f47fa88d53c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.111779] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "c69c8589-88e1-481e-87b8-55608322440c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.111779] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "c69c8589-88e1-481e-87b8-55608322440c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.119269] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.133982] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1161.134449] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1161.134526] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '41', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_f90aebcb3272478fa4a680a56504d1b7': '1', 'io_workload': '10', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1', 'num_proj_5922737c204d481fb40713877b5f46f5': '1', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '2', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1161.153402] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing inventories for resource provider 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1161.170797] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating ProviderTree inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1161.171069] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1161.187051] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing aggregate associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, aggregates: None {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1161.208445] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing trait associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, traits: 
COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1161.618809] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844d4758-2843-46c2-ae34-f2600e6c70ae {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.627096] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b6892b-eaf1-4569-b22e-6bdc2fce97a9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.669444] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1a2bf7-18f8-4ffc-84b7-5ba5dde7f26a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.676371] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888a2560-f39e-4a9a-843b-90aaf118c1d3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.691040] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.701692] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1161.721874] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1161.721874] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.874s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.721584] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.721902] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1162.721902] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f 
None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1162.749553] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.749918] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750082] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750190] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750312] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750429] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750547] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750664] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750777] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.750890] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1162.751031] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1162.753594] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.752630] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.752942] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1165.753657] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1165.753964] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.042849] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "5f5232f8-60f4-472f-ab6e-6273904481e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.043113] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.748600] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.678264] env[61570]: WARNING oslo_vmware.rw_handles [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1189.678264] env[61570]: ERROR oslo_vmware.rw_handles [ 1189.678264] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1189.680375] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1189.680417] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Copying Virtual Disk [datastore2] vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/a7dd859f-c10a-4fcf-995e-1edf5701d901/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1189.680874] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fce11401-00f1-4419-9d71-b9db0064c358 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.689969] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 1189.689969] env[61570]: value = "task-4891352" [ 1189.689969] env[61570]: _type = "Task" [ 1189.689969] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.700194] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': task-4891352, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.207404] env[61570]: DEBUG oslo_vmware.exceptions [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1190.207528] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.208119] env[61570]: ERROR nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1190.208119] env[61570]: Faults: ['InvalidArgument'] [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Traceback (most recent call last): [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] yield resources [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self.driver.spawn(context, instance, image_meta, [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self._fetch_image_if_missing(context, vi) [ 1190.208119] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] image_cache(vi, tmp_image_ds_loc) [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] vm_util.copy_virtual_disk( [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", 
line 1423, in copy_virtual_disk [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] session._wait_for_task(vmdk_copy_task) [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] return self.wait_for_task(task_ref) [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] return evt.wait() [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] result = hub.switch() [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1190.208529] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] return self.greenlet.switch() [ 1190.209899] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1190.209899] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self.f(*self.args, **self.kw) [ 1190.209899] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1190.209899] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] raise exceptions.translate_fault(task_info.error) [ 1190.209899] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1190.209899] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Faults: ['InvalidArgument'] [ 1190.209899] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] [ 1190.209899] env[61570]: INFO nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Terminating instance [ 1190.212739] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.212971] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] 
Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1190.213778] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1190.213852] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1190.214111] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-188fe7a1-6029-4094-b3b8-ede65818a1fc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.216607] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624ecf45-8563-4412-bd5f-06e1d891f235 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.225505] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1190.225804] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f850c15-dde1-4ff8-bc80-113f1a3d8b25 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.228715] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.228898] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1190.230471] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5daefc25-3a32-459f-a8dc-6c7b92e6f19a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.237642] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Waiting for the task: (returnval){ [ 1190.237642] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5219923d-ee6b-b9d3-e774-063cb83a30df" [ 1190.237642] env[61570]: _type = "Task" [ 1190.237642] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.247027] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5219923d-ee6b-b9d3-e774-063cb83a30df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.299166] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1190.299608] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1190.299608] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Deleting the datastore file [datastore2] 0a37f623-f757-4f67-a796-a8e17cfb9496 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1190.299973] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b8241a4-e0aa-49bc-ad2d-40eb0af7e311 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.307232] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for the task: (returnval){ [ 1190.307232] env[61570]: value = "task-4891354" [ 1190.307232] env[61570]: _type = "Task" [ 1190.307232] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.319219] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': task-4891354, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.749501] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1190.749835] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Creating directory with path [datastore2] vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1190.750089] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3534221e-3b74-4bfa-8c43-9c5399f283b6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.766298] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Created directory with path [datastore2] vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.766519] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Fetch image to [datastore2] vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1190.766692] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1190.768673] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c205c71a-eb13-43db-8cd2-4ba4d1de96db {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.775470] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a49cf32-1136-4c90-a16f-749fc1b017db {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.785610] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf10b658-faa3-4157-9ba4-1ab2c7e5e612 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.821168] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ecbd5d50-38fd-4aed-83ab-f6626abcb3f0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.829294] env[61570]: DEBUG oslo_vmware.api [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Task: {'id': task-4891354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083269} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.830855] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1190.830855] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1190.831181] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1190.831256] env[61570]: INFO nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1190.833240] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8faf56b7-09e7-406b-b480-d68cd883f9cc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.835143] env[61570]: DEBUG nova.compute.claims [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1190.835310] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.836179] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.863681] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1191.042496] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1191.112914] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1191.113131] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1191.246239] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9399b6-f81b-4379-af67-e63bd16b7ce6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.254039] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed90ec89-9430-4da5-bf6d-fdc1194b92ca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.286227] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2e2d25-029e-4628-b1a5-128b562e4d38 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.294349] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76db78c-1e17-4afa-8a1a-2567b0f192ae {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.308634] env[61570]: DEBUG nova.compute.provider_tree [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.320973] env[61570]: DEBUG nova.scheduler.client.report [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1191.338272] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.503s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.338835] env[61570]: ERROR nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1191.338835] env[61570]: Faults: ['InvalidArgument'] [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Traceback (most recent call last): [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1191.338835] env[61570]: ERROR nova.compute.manager 
[instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self.driver.spawn(context, instance, image_meta, [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self._fetch_image_if_missing(context, vi) [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] image_cache(vi, tmp_image_ds_loc) [ 1191.338835] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] vm_util.copy_virtual_disk( [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] session._wait_for_task(vmdk_copy_task) [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] return self.wait_for_task(task_ref) [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] return evt.wait() [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] result = hub.switch() [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] return self.greenlet.switch() [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1191.339247] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] self.f(*self.args, **self.kw) [ 1191.339651] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1191.339651] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] raise exceptions.translate_fault(task_info.error) [ 1191.339651] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1191.339651] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Faults: ['InvalidArgument'] [ 1191.339651] env[61570]: ERROR nova.compute.manager [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] [ 1191.339651] env[61570]: DEBUG nova.compute.utils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1191.341396] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Build of instance 0a37f623-f757-4f67-a796-a8e17cfb9496 was re-scheduled: A specified parameter was not correct: fileType [ 1191.341396] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1191.341787] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1191.341960] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1191.342149] env[61570]: DEBUG nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1191.342317] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1191.835273] env[61570]: DEBUG nova.network.neutron [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.851685] env[61570]: INFO nova.compute.manager [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Took 0.51 seconds to deallocate network for instance. [ 1191.967203] env[61570]: INFO nova.scheduler.client.report [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Deleted allocations for instance 0a37f623-f757-4f67-a796-a8e17cfb9496 [ 1192.003891] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3b8d2dca-6b89-4253-8c5a-5b1f727bed09 tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.673s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.005028] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 418.750s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.005028] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Acquiring lock "0a37f623-f757-4f67-a796-a8e17cfb9496-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.005209] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.005869] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.008202] env[61570]: INFO nova.compute.manager [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Terminating instance [ 1192.010251] env[61570]: DEBUG nova.compute.manager [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1192.010343] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1192.010841] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8297ba5-e988-41bd-ab6c-ffd60c105d3f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.022498] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa8521f-8617-4dee-a392-1aac585283d1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.034259] env[61570]: DEBUG nova.compute.manager [None req-6f26fceb-8d46-4d6f-9579-56bea14629a0 tempest-ServersTestManualDisk-769530409 tempest-ServersTestManualDisk-769530409-project-member] [instance: 2ea0e9df-20df-4d6b-9214-c94e0b8f8468] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.058505] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a37f623-f757-4f67-a796-a8e17cfb9496 could not be found. [ 1192.058727] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1192.058910] env[61570]: INFO nova.compute.manager [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1192.059188] env[61570]: DEBUG oslo.service.loopingcall [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1192.059452] env[61570]: DEBUG nova.compute.manager [-] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1192.059546] env[61570]: DEBUG nova.network.neutron [-] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1192.075033] env[61570]: DEBUG nova.compute.manager [None req-6f26fceb-8d46-4d6f-9579-56bea14629a0 tempest-ServersTestManualDisk-769530409 tempest-ServersTestManualDisk-769530409-project-member] [instance: 2ea0e9df-20df-4d6b-9214-c94e0b8f8468] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.093774] env[61570]: DEBUG nova.network.neutron [-] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.104301] env[61570]: INFO nova.compute.manager [-] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] Took 0.04 seconds to deallocate network for instance. [ 1192.108083] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6f26fceb-8d46-4d6f-9579-56bea14629a0 tempest-ServersTestManualDisk-769530409 tempest-ServersTestManualDisk-769530409-project-member] Lock "2ea0e9df-20df-4d6b-9214-c94e0b8f8468" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.874s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.119176] env[61570]: DEBUG nova.compute.manager [None req-d05e5743-9351-419d-9efd-a451321c2296 tempest-InstanceActionsV221TestJSON-1159670729 tempest-InstanceActionsV221TestJSON-1159670729-project-member] [instance: 66db9bd5-4f21-475a-be59-c38a4b45e43e] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.145412] env[61570]: DEBUG nova.compute.manager [None req-d05e5743-9351-419d-9efd-a451321c2296 tempest-InstanceActionsV221TestJSON-1159670729 tempest-InstanceActionsV221TestJSON-1159670729-project-member] [instance: 66db9bd5-4f21-475a-be59-c38a4b45e43e] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.185398] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d05e5743-9351-419d-9efd-a451321c2296 tempest-InstanceActionsV221TestJSON-1159670729 tempest-InstanceActionsV221TestJSON-1159670729-project-member] Lock "66db9bd5-4f21-475a-be59-c38a4b45e43e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.779s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.199503] env[61570]: DEBUG nova.compute.manager [None req-7ab0422e-060d-4ab8-bb83-6b1828733582 tempest-ServerActionsTestOtherB-681975538 tempest-ServerActionsTestOtherB-681975538-project-member] [instance: b2749b84-ad2b-4e19-ab8f-4d4b3e157260] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.217717] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7958d99b-849f-4b2b-a235-9b178fd6deac tempest-ServersAdminTestJSON-597179595 tempest-ServersAdminTestJSON-597179595-project-member] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.218644] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 71.006s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.218829] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0a37f623-f757-4f67-a796-a8e17cfb9496] During sync_power_state the instance has a pending task (deleting). Skip. [ 1192.219057] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "0a37f623-f757-4f67-a796-a8e17cfb9496" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.228441] env[61570]: DEBUG nova.compute.manager [None req-7ab0422e-060d-4ab8-bb83-6b1828733582 tempest-ServerActionsTestOtherB-681975538 tempest-ServerActionsTestOtherB-681975538-project-member] [instance: b2749b84-ad2b-4e19-ab8f-4d4b3e157260] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.258646] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7ab0422e-060d-4ab8-bb83-6b1828733582 tempest-ServerActionsTestOtherB-681975538 tempest-ServerActionsTestOtherB-681975538-project-member] Lock "b2749b84-ad2b-4e19-ab8f-4d4b3e157260" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.403s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.270524] env[61570]: DEBUG nova.compute.manager [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] [instance: b54ba09c-1148-490c-89c4-9dd210249220] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.332514] env[61570]: DEBUG nova.compute.manager [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] [instance: b54ba09c-1148-490c-89c4-9dd210249220] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.366894] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Lock "b54ba09c-1148-490c-89c4-9dd210249220" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.253s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.383744] env[61570]: DEBUG nova.compute.manager [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] [instance: 68805c65-f211-4018-a3a7-ea458ef817e3] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.407802] env[61570]: DEBUG nova.compute.manager [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] [instance: 68805c65-f211-4018-a3a7-ea458ef817e3] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.435597] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Lock "68805c65-f211-4018-a3a7-ea458ef817e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.284s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.451293] env[61570]: DEBUG nova.compute.manager [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] [instance: 96532039-0ec3-4852-87f7-6bdaa209f5c9] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.506389] env[61570]: DEBUG nova.compute.manager [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] [instance: 96532039-0ec3-4852-87f7-6bdaa209f5c9] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1192.543900] env[61570]: DEBUG oslo_concurrency.lockutils [None req-647cd005-7d28-47b3-b0a3-830e25b5b822 tempest-ListServersNegativeTestJSON-1256781884 tempest-ListServersNegativeTestJSON-1256781884-project-member] Lock "96532039-0ec3-4852-87f7-6bdaa209f5c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.349s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.557532] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1192.619216] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.619710] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.621009] env[61570]: INFO nova.compute.claims [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1192.933949] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdead162-ff96-4049-b22c-209074965a6d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.942124] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d436cf9-11ab-470d-a3f8-963a4e767a27 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.973112] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1a28a9-3454-486b-9a1f-59529ce44698 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.981366] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5eea7308-c6ba-4714-9547-48a30343aeb1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.995319] env[61570]: DEBUG nova.compute.provider_tree [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.004847] env[61570]: DEBUG nova.scheduler.client.report [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1193.018795] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.399s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.019321] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1193.054976] env[61570]: DEBUG nova.compute.utils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1193.056660] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Allocating IP information in the background. 
{{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1193.056858] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1193.069541] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1193.136766] env[61570]: DEBUG nova.policy [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b125133d0ef345c9a51def48b2cf52b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eccce396bddd42cc931ff5c0e6850b49', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1193.152108] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1193.177338] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1193.177589] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1193.178046] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.178046] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1193.178294] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.178485] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1193.178763] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1193.178945] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 
tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1193.179141] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1193.179355] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1193.179567] env[61570]: DEBUG nova.virt.hardware [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1193.180483] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8bf2a0-fd6d-4baa-b9a0-f5a533cd26b1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.189858] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b34a5d-4391-45d6-ae61-d6994cd5bf66 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.198585] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.198836] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.583128] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Successfully created port: 690a918f-0072-4de0-ae84-745a964fbff1 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1194.368014] env[61570]: DEBUG nova.compute.manager [req-bfcc68d3-67d4-405d-b1b0-4351502f6c63 req-74f0ff3d-67e5-40e9-bee1-f9293728253b service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Received event network-vif-plugged-690a918f-0072-4de0-ae84-745a964fbff1 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 
1194.368264] env[61570]: DEBUG oslo_concurrency.lockutils [req-bfcc68d3-67d4-405d-b1b0-4351502f6c63 req-74f0ff3d-67e5-40e9-bee1-f9293728253b service nova] Acquiring lock "f9d0b44c-a338-495e-8ed2-9c79813671fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.368435] env[61570]: DEBUG oslo_concurrency.lockutils [req-bfcc68d3-67d4-405d-b1b0-4351502f6c63 req-74f0ff3d-67e5-40e9-bee1-f9293728253b service nova] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.369025] env[61570]: DEBUG oslo_concurrency.lockutils [req-bfcc68d3-67d4-405d-b1b0-4351502f6c63 req-74f0ff3d-67e5-40e9-bee1-f9293728253b service nova] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.369025] env[61570]: DEBUG nova.compute.manager [req-bfcc68d3-67d4-405d-b1b0-4351502f6c63 req-74f0ff3d-67e5-40e9-bee1-f9293728253b service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] No waiting events found dispatching network-vif-plugged-690a918f-0072-4de0-ae84-745a964fbff1 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1194.369025] env[61570]: WARNING nova.compute.manager [req-bfcc68d3-67d4-405d-b1b0-4351502f6c63 req-74f0ff3d-67e5-40e9-bee1-f9293728253b service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Received unexpected event network-vif-plugged-690a918f-0072-4de0-ae84-745a964fbff1 for instance with vm_state building and task_state spawning. 
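The nova.virt.hardware lines above show Nova deriving a guest CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image constraints (limits and preferences all 0:0:0, caps of 65536 per dimension), the only factorization of one vCPU is sockets=1, cores=1, threads=1, which is the single topology the log reports. The standalone Python sketch below illustrates that enumeration in simplified form; it is an illustration of the constraint, not Nova's actual _get_possible_cpu_topologies code, and the helper name possible_topologies is hypothetical.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals the vCPU
    # count and that respect the per-dimension caps seen in the log above.
    return [
        (s, c, t)
        for s, c, t in product(range(1, vcpus + 1), repeat=3)
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads
    ]

print(possible_topologies(1))  # [(1, 1, 1)] -- one possible topology, as logged
print(possible_topologies(2))  # [(1, 1, 2), (1, 2, 1), (2, 1, 1)]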
[ 1194.474504] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Successfully updated port: 690a918f-0072-4de0-ae84-745a964fbff1 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1194.488567] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "refresh_cache-f9d0b44c-a338-495e-8ed2-9c79813671fe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.488728] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquired lock "refresh_cache-f9d0b44c-a338-495e-8ed2-9c79813671fe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.488885] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1194.567532] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1194.612966] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.613210] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.815925] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Updating instance_info_cache with network_info: [{"id": "690a918f-0072-4de0-ae84-745a964fbff1", "address": "fa:16:3e:11:52:82", "network": {"id": "b2fedceb-54f9-4731-950c-18a7be38116e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2012622398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eccce396bddd42cc931ff5c0e6850b49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap690a918f-00", "ovs_interfaceid": "690a918f-0072-4de0-ae84-745a964fbff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.832228] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Releasing lock "refresh_cache-f9d0b44c-a338-495e-8ed2-9c79813671fe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.833027] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Instance network_info: |[{"id": "690a918f-0072-4de0-ae84-745a964fbff1", "address": "fa:16:3e:11:52:82", "network": {"id": "b2fedceb-54f9-4731-950c-18a7be38116e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2012622398-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eccce396bddd42cc931ff5c0e6850b49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap690a918f-00", "ovs_interfaceid": "690a918f-0072-4de0-ae84-745a964fbff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1194.833215] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:52:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '690a918f-0072-4de0-ae84-745a964fbff1', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1194.840673] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Creating folder: Project (eccce396bddd42cc931ff5c0e6850b49). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1194.841366] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00677cb9-6c4c-470f-bbd1-7e00ece9f83f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.854087] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Created folder: Project (eccce396bddd42cc931ff5c0e6850b49) in parent group-v953072. [ 1194.854317] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Creating folder: Instances. Parent ref: group-v953137. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1194.854535] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0de5b8c-ce80-4305-a3e7-4c9bd825ebc7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.864278] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Created folder: Instances in parent group-v953137. [ 1194.864528] env[61570]: DEBUG oslo.service.loopingcall [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1194.864717] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1194.864925] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-864cce72-7bec-46ea-9bb1-835d4bf08ab2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.884678] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1194.884678] env[61570]: value = "task-4891357" [ 1194.884678] env[61570]: _type = "Task" [ 1194.884678] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.893537] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891357, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.395341] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891357, 'name': CreateVM_Task, 'duration_secs': 0.312823} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.395760] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1195.402451] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.402625] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.402957] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1195.403235] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-023dcac6-34c9-4f1b-a729-921f634ef8de {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.408040] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Waiting for the task: (returnval){ [ 1195.408040] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526d6b5d-5894-5ce6-bc01-5cb2abc9bef5" [ 1195.408040] env[61570]: _type = "Task" [ 1195.408040] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.415817] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526d6b5d-5894-5ce6-bc01-5cb2abc9bef5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.919183] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.919524] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1195.919812] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.398509] env[61570]: DEBUG nova.compute.manager [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Received event network-changed-690a918f-0072-4de0-ae84-745a964fbff1 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1196.398759] env[61570]: DEBUG nova.compute.manager [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Refreshing instance network info cache due to event network-changed-690a918f-0072-4de0-ae84-745a964fbff1. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1196.398927] env[61570]: DEBUG oslo_concurrency.lockutils [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] Acquiring lock "refresh_cache-f9d0b44c-a338-495e-8ed2-9c79813671fe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.399080] env[61570]: DEBUG oslo_concurrency.lockutils [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] Acquired lock "refresh_cache-f9d0b44c-a338-495e-8ed2-9c79813671fe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.399239] env[61570]: DEBUG nova.network.neutron [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Refreshing network info cache for port 690a918f-0072-4de0-ae84-745a964fbff1 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1196.685474] env[61570]: DEBUG nova.network.neutron [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Updated VIF entry in instance network info cache for port 690a918f-0072-4de0-ae84-745a964fbff1. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1196.685876] env[61570]: DEBUG nova.network.neutron [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Updating instance_info_cache with network_info: [{"id": "690a918f-0072-4de0-ae84-745a964fbff1", "address": "fa:16:3e:11:52:82", "network": {"id": "b2fedceb-54f9-4731-950c-18a7be38116e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2012622398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eccce396bddd42cc931ff5c0e6850b49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap690a918f-00", "ovs_interfaceid": "690a918f-0072-4de0-ae84-745a964fbff1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.697489] env[61570]: DEBUG oslo_concurrency.lockutils [req-cad258dd-0d68-40d3-a587-ae8ef295bbbc req-ee59351b-26ac-42d2-a3a5-8e596639323e service nova] Releasing lock "refresh_cache-f9d0b44c-a338-495e-8ed2-9c79813671fe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.344179] env[61570]: DEBUG oslo_concurrency.lockutils [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "f9d0b44c-a338-495e-8ed2-9c79813671fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.249707] env[61570]: DEBUG oslo_concurrency.lockutils [None req-191b5b41-9199-425c-8347-62df22c71112 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "8fa50d72-12ee-4345-9ad7-07896fe34776" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.249707] env[61570]: DEBUG oslo_concurrency.lockutils [None req-191b5b41-9199-425c-8347-62df22c71112 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "8fa50d72-12ee-4345-9ad7-07896fe34776" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.799097] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1d9a1c27-6250-4d1c-806e-dd57bd45d341 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring 
lock "ea7cddf9-4529-4716-bc4e-8490e3f5ef83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.799526] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1d9a1c27-6250-4d1c-806e-dd57bd45d341 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "ea7cddf9-4529-4716-bc4e-8490e3f5ef83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.731195] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c0100984-04e9-4b0c-803d-fcd42a8d8322 tempest-ServersTestJSON-1682873444 tempest-ServersTestJSON-1682873444-project-member] Acquiring lock "3dec2bfa-16bf-423d-9ce3-16da3d9e9397" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.731489] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c0100984-04e9-4b0c-803d-fcd42a8d8322 tempest-ServersTestJSON-1682873444 tempest-ServersTestJSON-1682873444-project-member] Lock "3dec2bfa-16bf-423d-9ce3-16da3d9e9397" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.752882] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.753164] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1221.753289] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.768488] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.768727] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.768899] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.769068] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1221.770448] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8204040-67b8-485b-b1b6-e971b9bb4342 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.781387] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd02d4d8-ddee-47da-a468-d55bb1bc6c34 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.797215] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e031ab3-61c4-4bbd-b0ca-ad6503fc4ad5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.806017] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a57d15-f1c8-42c7-903e-bf3d36390293 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.844224] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180595MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1221.844379] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1221.844626] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.950495] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.950667] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.950797] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.950920] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.951050] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.951174] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.951292] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.951410] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.951527] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.951640] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1221.966328] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance b119eaef-9f58-46e6-9bc4-f47fa88d53c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.980654] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1221.992916] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.004993] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.017557] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.029819] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.041838] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8fa50d72-12ee-4345-9ad7-07896fe34776 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.052763] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ea7cddf9-4529-4716-bc4e-8490e3f5ef83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.064669] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 3dec2bfa-16bf-423d-9ce3-16da3d9e9397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1222.064922] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1222.065105] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '48', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_cf6825d6d7de4a6f88c5aa497feacb1c': '1', 'io_workload': '10', 'num_proj_5922737c204d481fb40713877b5f46f5': '1', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '2', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_eccce396bddd42cc931ff5c0e6850b49': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1222.375664] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e227467b-fe88-4d0d-a339-197e294d1363 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.384659] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef6f1cf-b0bb-4acc-b1f1-a29bd4d26e9f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.422027] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405c2cc7-7237-4ac3-95f9-eda187f59270 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.431277] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97090bff-d0cf-440c-85a8-afcf94a4ccbb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.445698] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1222.459958] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1222.486028] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1222.486028] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.641s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.851623] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0019c138-f0bd-487b-a08d-4f60f24a9674 tempest-ServerShowV257Test-1481275900 tempest-ServerShowV257Test-1481275900-project-member] Acquiring lock "23632a70-aec8-44aa-aa56-8ebe2b91840e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.851972] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0019c138-f0bd-487b-a08d-4f60f24a9674 tempest-ServerShowV257Test-1481275900 tempest-ServerShowV257Test-1481275900-project-member] Lock "23632a70-aec8-44aa-aa56-8ebe2b91840e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.486444] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.486672] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1224.486812] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1224.510608] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.510782] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.510895] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511067] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511215] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511340] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511459] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511576] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511689] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511802] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1224.511922] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1224.512460] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.752293] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.752536] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.753493] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.754600] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.748876] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.748637] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.739052] env[61570]: WARNING oslo_vmware.rw_handles [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1238.739052] env[61570]: ERROR 
oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1238.739052] env[61570]: ERROR oslo_vmware.rw_handles [ 1238.739052] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1238.740009] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1238.740266] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Copying Virtual Disk [datastore2] vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/f23d2276-c4b7-41ab-9c5b-7f8ee6a9b599/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1238.740542] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7badb0b-a2bc-46ac-9338-9ec3c0b979de {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.751666] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Waiting for the task: (returnval){ [ 1238.751666] env[61570]: value = "task-4891358" [ 1238.751666] env[61570]: _type = "Task" [ 1238.751666] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.760463] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Task: {'id': task-4891358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.262450] env[61570]: DEBUG oslo_vmware.exceptions [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1239.262735] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.263306] env[61570]: ERROR nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1239.263306] env[61570]: Faults: ['InvalidArgument'] [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Traceback (most recent call last): [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] yield resources [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self.driver.spawn(context, instance, image_meta, [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self._fetch_image_if_missing(context, vi) [ 1239.263306] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] image_cache(vi, tmp_image_ds_loc) [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] vm_util.copy_virtual_disk( [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] session._wait_for_task(vmdk_copy_task) [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] return self.wait_for_task(task_ref) [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] return evt.wait() [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] result = hub.switch() [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1239.263687] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] return self.greenlet.switch() [ 1239.264081] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1239.264081] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self.f(*self.args, **self.kw) [ 1239.264081] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1239.264081] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] raise exceptions.translate_fault(task_info.error) [ 1239.264081] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1239.264081] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Faults: ['InvalidArgument'] [ 1239.264081] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] [ 1239.264081] env[61570]: INFO nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Terminating instance [ 1239.265230] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.265439] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1239.265679] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f088cd4f-d38e-41f1-ba63-fd503969bea6 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.268116] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1239.268311] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1239.269072] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afd688a-66dd-47e8-a307-547c4a419876 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.275840] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1239.276081] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-591fe9de-2ebf-4d99-8c77-67e9a1f43957 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.278513] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.278699] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1239.279676] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19c30480-c6f0-4dd2-838f-813d68381474 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.286061] env[61570]: DEBUG oslo_vmware.api [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Waiting for the task: (returnval){ [ 1239.286061] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]521332bd-4de2-d6cd-0396-8e647e59f2e3" [ 1239.286061] env[61570]: _type = "Task" [ 1239.286061] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.293703] env[61570]: DEBUG oslo_vmware.api [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]521332bd-4de2-d6cd-0396-8e647e59f2e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.354035] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1239.354289] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1239.354289] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Deleting the datastore file [datastore2] 1435e51b-58b8-406f-9def-f9e6e7bffd8a {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1239.354585] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c12ae11-5d92-4f71-a87f-6968f8196d8d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.361263] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Waiting for the task: (returnval){ [ 1239.361263] env[61570]: value = "task-4891360" [ 1239.361263] env[61570]: _type = "Task" [ 1239.361263] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.369497] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Task: {'id': task-4891360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.796823] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1239.797180] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Creating directory with path [datastore2] vmware_temp/44ced595-8259-4e60-a1a8-29086e285ebf/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1239.797861] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b592451-3cd9-488c-b7c5-03e0cf414d2b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.808521] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Created directory with path [datastore2] vmware_temp/44ced595-8259-4e60-a1a8-29086e285ebf/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.808720] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Fetch image to [datastore2] vmware_temp/44ced595-8259-4e60-a1a8-29086e285ebf/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1239.808893] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/44ced595-8259-4e60-a1a8-29086e285ebf/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1239.809704] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbea4a40-f0bc-4ce5-9ba1-d851712e54a7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.816574] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcaf3c3-7762-4471-bf6f-0306df118370 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.825749] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b03e5c4-4c89-4e2f-b2da-237d52579b58 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.856146] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7890bb79-9219-4d2f-939f-677e225107bb 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.864834] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-89f5cbcc-ee89-41c3-bd69-fc3adcd0e72b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.872592] env[61570]: DEBUG oslo_vmware.api [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Task: {'id': task-4891360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068477} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.872945] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1239.873275] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1239.873585] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1239.873817] env[61570]: INFO nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1239.876112] env[61570]: DEBUG nova.compute.claims [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1239.876303] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.876547] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.889712] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1240.114557] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.116183] env[61570]: ERROR nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
[ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = getattr(controller, method)(*args, **kwargs) [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._get(image_id) [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1240.116183] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] resp, body = self.http_client.get(url, headers=header) [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.request(url, 'GET', **kwargs) [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._handle_response(resp) [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exc.from_response(resp, resp.content) [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During handling of the above exception, another exception occurred: [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1240.116986] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] yield resources [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self.driver.spawn(context, instance, image_meta, [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._fetch_image_if_missing(context, vi) [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image_fetch(context, vi, tmp_image_ds_loc) [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] images.fetch_image( [ 1240.117455] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] metadata = IMAGE_API.get(context, image_ref) [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return session.show(context, image_id, [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] _reraise_translated_image_exception(image_id) [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise new_exc.with_traceback(exc_trace) [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = getattr(controller, method)(*args, **kwargs) [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1240.117850] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._get(image_id) [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] resp, body = self.http_client.get(url, headers=header) [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.request(url, 'GET', **kwargs) [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._handle_response(resp) [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exc.from_response(resp, resp.content) [ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
[ 1240.118364] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1240.118907] env[61570]: INFO nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Terminating instance [ 1240.118907] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.118907] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.119110] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1240.119171] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1240.119422] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65f7f653-ccd6-4ca1-bae3-0ffa25fc64c2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.123734] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e272718a-0b9e-48f4-bf10-687e057f0982 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.133955] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1240.134342] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87e34797-5d45-47ad-a6b0-2d605e504347 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.137316] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.137513] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 
tempest-VolumesAdminNegativeTest-808236367-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1240.139020] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a288119e-f720-46ee-898f-18d43cf97089 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.151069] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Waiting for the task: (returnval){ [ 1240.151069] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]529a6f58-9356-d905-ed32-e1bb7dcb8bfa" [ 1240.151069] env[61570]: _type = "Task" [ 1240.151069] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.163602] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1240.163731] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Creating directory with path [datastore2] vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.163997] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77643e44-837c-40a5-9f6f-47463367566d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.186227] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0706cce6-6c7b-4809-89ce-7e5288c5ccff {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.190836] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Created directory with path [datastore2] vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.191069] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Fetch image to [datastore2] vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1240.191235] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to 
[datastore2] vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1240.193142] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9aec23-8979-4d20-a61a-4ef8b4c73b7f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.201797] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d442015-6765-4199-8f41-83afb48a6a58 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.207331] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f3f50d-9d34-49ad-856c-598c1b5dc3c6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.217812] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcae956-746e-4089-90af-99aeca1d1225 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.222319] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1240.222543] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1240.222730] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Deleting the datastore file [datastore2] c6acb8ba-7877-44c9-a78b-f15fc6d47b28 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1240.247986] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c00290fd-f940-4830-a6d9-d4549989d091 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.251401] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c5b560-2675-48d9-93c3-3186b1494255 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.283942] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e540b6-fa2e-4351-ba18-da51c08f2014 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.291555] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9931b653-7965-40c7-9e93-46af5caaadf3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.298022] env[61570]: 
DEBUG oslo_vmware.api [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Waiting for the task: (returnval){ [ 1240.298022] env[61570]: value = "task-4891362" [ 1240.298022] env[61570]: _type = "Task" [ 1240.298022] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.301088] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f54b95bf-b2c0-4d15-91cd-8800ce24e845 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.311226] env[61570]: DEBUG nova.compute.provider_tree [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.318736] env[61570]: DEBUG oslo_vmware.api [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Task: {'id': task-4891362, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.323916] env[61570]: DEBUG nova.scheduler.client.report [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1240.339846] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1240.342071] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.465s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.342688] env[61570]: ERROR nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1240.342688] env[61570]: Faults: 
['InvalidArgument'] [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Traceback (most recent call last): [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self.driver.spawn(context, instance, image_meta, [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self._fetch_image_if_missing(context, vi) [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] image_cache(vi, tmp_image_ds_loc) [ 1240.342688] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] vm_util.copy_virtual_disk( [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] session._wait_for_task(vmdk_copy_task) [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] return self.wait_for_task(task_ref) [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] return evt.wait() [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] result = hub.switch() [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] return self.greenlet.switch() [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 
1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1240.343071] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] self.f(*self.args, **self.kw) [ 1240.343501] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1240.343501] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] raise exceptions.translate_fault(task_info.error) [ 1240.343501] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1240.343501] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Faults: ['InvalidArgument'] [ 1240.343501] env[61570]: ERROR nova.compute.manager [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] [ 1240.343501] env[61570]: DEBUG nova.compute.utils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1240.345075] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Build of instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a was re-scheduled: A specified parameter was not correct: fileType [ 1240.345075] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1240.345534] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1240.345724] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1240.345912] env[61570]: DEBUG nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1240.346107] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1240.399394] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1240.463710] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1240.463710] env[61570]: DEBUG oslo_vmware.rw_handles [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1240.811158] env[61570]: DEBUG oslo_vmware.api [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Task: {'id': task-4891362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070155} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.812133] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1240.813309] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1240.813512] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1240.813691] env[61570]: INFO nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Took 0.69 seconds to destroy the instance on the hypervisor. [ 1240.817087] env[61570]: DEBUG nova.network.neutron [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.818535] env[61570]: DEBUG nova.compute.claims [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1240.818713] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.818939] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.834908] env[61570]: INFO nova.compute.manager [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Took 0.49 seconds to deallocate network for instance. 
[ 1240.937815] env[61570]: INFO nova.scheduler.client.report [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Deleted allocations for instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a [ 1240.966992] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e8dffdc2-4049-46f9-8306-57c2a67b68b4 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 663.092s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.968301] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 464.564s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.968856] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Acquiring lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.968856] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.969032] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.973649] env[61570]: INFO nova.compute.manager [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Terminating instance [ 1240.975354] env[61570]: DEBUG nova.compute.manager [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1240.975543] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1240.976419] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5377df2a-6f2f-45f2-a06a-b824403b4ea7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.981757] env[61570]: DEBUG nova.compute.manager [None req-0dec7974-69f0-4038-8170-9194c764ba51 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: fa47c527-5dc6-4162-b4d8-d8bab3f2b13c] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1240.990553] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81612063-45b2-4f04-8c98-a82b40a80499 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.007285] env[61570]: DEBUG nova.compute.manager [None req-0dec7974-69f0-4038-8170-9194c764ba51 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: fa47c527-5dc6-4162-b4d8-d8bab3f2b13c] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.023515] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1435e51b-58b8-406f-9def-f9e6e7bffd8a could not be found. [ 1241.023515] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1241.023701] env[61570]: INFO nova.compute.manager [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1241.023947] env[61570]: DEBUG oslo.service.loopingcall [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1241.028363] env[61570]: DEBUG nova.compute.manager [-] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1241.028464] env[61570]: DEBUG nova.network.neutron [-] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1241.039839] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0dec7974-69f0-4038-8170-9194c764ba51 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "fa47c527-5dc6-4162-b4d8-d8bab3f2b13c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.866s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.051439] env[61570]: DEBUG nova.compute.manager [None req-84723f06-ca83-46ac-b41f-2fcc576388d1 tempest-ServersTestFqdnHostnames-114570745 tempest-ServersTestFqdnHostnames-114570745-project-member] [instance: 1f6709ec-fb9d-490d-beb8-53883fb533e3] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.057335] env[61570]: DEBUG nova.network.neutron [-] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.065809] env[61570]: INFO nova.compute.manager [-] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] Took 0.04 seconds to deallocate network for instance. [ 1241.085974] env[61570]: DEBUG nova.compute.manager [None req-84723f06-ca83-46ac-b41f-2fcc576388d1 tempest-ServersTestFqdnHostnames-114570745 tempest-ServersTestFqdnHostnames-114570745-project-member] [instance: 1f6709ec-fb9d-490d-beb8-53883fb533e3] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.112233] env[61570]: DEBUG oslo_concurrency.lockutils [None req-84723f06-ca83-46ac-b41f-2fcc576388d1 tempest-ServersTestFqdnHostnames-114570745 tempest-ServersTestFqdnHostnames-114570745-project-member] Lock "1f6709ec-fb9d-490d-beb8-53883fb533e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.252s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.126061] env[61570]: DEBUG nova.compute.manager [None req-b16b6e0b-1c3f-4c56-b272-460bf5836f44 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: f6b0686c-b81d-4b18-bacf-be573a28a277] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.159622] env[61570]: DEBUG nova.compute.manager [None req-b16b6e0b-1c3f-4c56-b272-460bf5836f44 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: f6b0686c-b81d-4b18-bacf-be573a28a277] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.193232] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b16b6e0b-1c3f-4c56-b272-460bf5836f44 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "f6b0686c-b81d-4b18-bacf-be573a28a277" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.197s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.196186] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2a5c51-18b9-404b-bf66-c01e4179aecd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.206697] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8c336e-5d11-4d1f-860f-a12c8a255e05 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.210272] env[61570]: DEBUG oslo_concurrency.lockutils [None req-2291144f-d28b-4731-810f-3c779efd5331 tempest-VolumesAssistedSnapshotsTest-563499322 tempest-VolumesAssistedSnapshotsTest-563499322-project-member] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.242s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.212050] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 119.999s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.212050] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1435e51b-58b8-406f-9def-f9e6e7bffd8a] During sync_power_state the instance has a pending task (deleting). Skip. [ 1241.212050] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "1435e51b-58b8-406f-9def-f9e6e7bffd8a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.212250] env[61570]: DEBUG nova.compute.manager [None req-5cb87ca1-3ba6-418e-a820-ede9f99668fb tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: ef7c996b-f62f-4146-b48b-c865c362c12c] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.242635] env[61570]: DEBUG nova.compute.manager [None req-5cb87ca1-3ba6-418e-a820-ede9f99668fb tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: ef7c996b-f62f-4146-b48b-c865c362c12c] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.244111] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5353faf7-58e9-4edf-9bd9-77cf8172071c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.252789] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3597b647-b337-4f0c-ad16-bb0c1f5835ac {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.271539] env[61570]: DEBUG nova.compute.provider_tree [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.274023] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5cb87ca1-3ba6-418e-a820-ede9f99668fb tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "ef7c996b-f62f-4146-b48b-c865c362c12c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.750s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.280547] env[61570]: DEBUG nova.scheduler.client.report [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1241.284710] env[61570]: DEBUG nova.compute.manager [None req-44825749-21a6-40ee-873c-2b2a48aec073 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] [instance: e9385963-cda4-4778-92d6-4a20722b34bb] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.297625] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.478s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.299615] env[61570]: ERROR nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
[ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = getattr(controller, method)(*args, **kwargs) [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._get(image_id) [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1241.299615] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] resp, body = self.http_client.get(url, headers=header) [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.request(url, 'GET', **kwargs) [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._handle_response(resp) [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exc.from_response(resp, resp.content) [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During handling of the above exception, another exception occurred: [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.300323] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self.driver.spawn(context, instance, image_meta, [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._fetch_image_if_missing(context, vi) [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image_fetch(context, vi, tmp_image_ds_loc) [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] images.fetch_image( [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] metadata = IMAGE_API.get(context, image_ref) [ 1241.300847] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return session.show(context, image_id, [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] _reraise_translated_image_exception(image_id) [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise new_exc.with_traceback(exc_trace) [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: 
c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = getattr(controller, method)(*args, **kwargs) [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._get(image_id) [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1241.301219] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] resp, body = self.http_client.get(url, headers=header) [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.request(url, 'GET', **kwargs) [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._handle_response(resp) [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exc.from_response(resp, resp.content) [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. [ 1241.301566] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.301843] env[61570]: DEBUG nova.compute.utils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
{{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1241.301843] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Build of instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 was re-scheduled: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1241.302817] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1241.302817] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1241.302817] env[61570]: DEBUG nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1241.302817] env[61570]: DEBUG nova.network.neutron [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1241.315396] env[61570]: DEBUG nova.compute.manager [None req-44825749-21a6-40ee-873c-2b2a48aec073 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] [instance: e9385963-cda4-4778-92d6-4a20722b34bb] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.340273] env[61570]: DEBUG oslo_concurrency.lockutils [None req-44825749-21a6-40ee-873c-2b2a48aec073 tempest-AttachInterfacesTestJSON-1404949640 tempest-AttachInterfacesTestJSON-1404949640-project-member] Lock "e9385963-cda4-4778-92d6-4a20722b34bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.305s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.352485] env[61570]: DEBUG nova.compute.manager [None req-117e8b18-4008-44a6-b16f-2ddab1324259 tempest-ServerMetadataNegativeTestJSON-777350147 tempest-ServerMetadataNegativeTestJSON-777350147-project-member] [instance: b119eaef-9f58-46e6-9bc4-f47fa88d53c2] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.381592] env[61570]: DEBUG nova.compute.manager [None req-117e8b18-4008-44a6-b16f-2ddab1324259 tempest-ServerMetadataNegativeTestJSON-777350147 tempest-ServerMetadataNegativeTestJSON-777350147-project-member] [instance: b119eaef-9f58-46e6-9bc4-f47fa88d53c2] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1241.421368] env[61570]: DEBUG oslo_concurrency.lockutils [None req-117e8b18-4008-44a6-b16f-2ddab1324259 tempest-ServerMetadataNegativeTestJSON-777350147 tempest-ServerMetadataNegativeTestJSON-777350147-project-member] Lock "b119eaef-9f58-46e6-9bc4-f47fa88d53c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.579s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.432071] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.529027] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.529027] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.529027] env[61570]: INFO nova.compute.claims [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1241.679475] env[61570]: DEBUG neutronclient.v2_0.client [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1241.681980] env[61570]: ERROR nova.compute.manager [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = getattr(controller, method)(*args, **kwargs) [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._get(image_id) [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1241.681980] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] resp, body = self.http_client.get(url, headers=header) [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.request(url, 'GET', **kwargs) [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._handle_response(resp) [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exc.from_response(resp, resp.content) [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During handling of the above exception, another exception occurred: [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.682395] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self.driver.spawn(context, instance, image_meta, [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._fetch_image_if_missing(context, vi) [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image_fetch(context, vi, tmp_image_ds_loc) [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] images.fetch_image( [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] metadata = IMAGE_API.get(context, image_ref) [ 1241.682729] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return session.show(context, image_id, [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] _reraise_translated_image_exception(image_id) [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise new_exc.with_traceback(exc_trace) [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: 
c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = getattr(controller, method)(*args, **kwargs) [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._get(image_id) [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1241.683087] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] resp, body = self.http_client.get(url, headers=header) [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.request(url, 'GET', **kwargs) [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self._handle_response(resp) [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exc.from_response(resp, resp.content) [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
[ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During handling of the above exception, another exception occurred: [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.683453] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._build_and_run_instance(context, instance, image, [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exception.RescheduledException( [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] nova.exception.RescheduledException: Build of instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 was re-scheduled: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During handling of the above exception, another exception occurred: [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1241.683906] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] exception_handler_v20(status_code, error_body) [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise client_exc(message=error_message, [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Neutron server returns request_ids: ['req-6dbcda59-185c-4748-90b5-58601ac1434f'] [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: 
c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During handling of the above exception, another exception occurred: [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._deallocate_network(context, instance, requested_networks) [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self.network_api.deallocate_for_instance( [ 1241.684293] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] data = neutron.list_ports(**search_opts) [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.list('ports', self.ports_path, retrieve_all, [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] for r in self._pagination(collection, path, **params): [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] res = self.get(path, params=params) [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1241.684674] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: 
c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.retry_request("GET", action, body=body, [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.do_request(method, action, body=body, [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._handle_fault_response(status_code, replybody, resp) [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exception.Unauthorized() [ 1241.685538] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] nova.exception.Unauthorized: Not authorized. 
[ 1241.685928] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1241.756544] env[61570]: INFO nova.scheduler.client.report [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Deleted allocations for instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 [ 1241.779032] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d4655269-adc5-41c2-9c88-3d183dca0d2f tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 640.515s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.780031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 441.053s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.780293] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.780433] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.780602] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.782904] env[61570]: INFO nova.compute.manager [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Terminating instance [ 1241.784662] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Acquiring lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.784866] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 
tempest-ServerDiagnosticsTest-1795029508-project-member] Acquired lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.785090] env[61570]: DEBUG nova.network.neutron [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1241.791729] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1241.855030] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.905873] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2388e24c-86b8-42f0-ade7-a5e4070b8858 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.915890] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf60684d-3eb4-469d-8a91-b25c9330cef6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.950392] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4f8e72-3569-414c-8f19-6a9cc602518e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.957865] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac535194-2e42-4018-9327-19c95fe46f94 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.972277] env[61570]: DEBUG nova.compute.provider_tree [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.981947] env[61570]: DEBUG nova.scheduler.client.report [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1241.999864] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.473s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.000453] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1242.002953] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.148s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.004379] env[61570]: INFO nova.compute.claims [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1242.043311] env[61570]: DEBUG nova.compute.utils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1242.044611] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1242.045103] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1242.054691] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Start building block device mappings for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1242.115088] env[61570]: DEBUG nova.policy [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '60d2ebeeff764d2caf68384942cee800', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9697eba07488413b9b05222af1a8e33d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1242.126931] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1242.133261] env[61570]: DEBUG nova.network.neutron [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Updating instance_info_cache with network_info: [{"id": "3a75fb31-58b0-4547-95af-af9a90f7375e", "address": "fa:16:3e:5e:df:85", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a75fb31-58", "ovs_interfaceid": "3a75fb31-58b0-4547-95af-af9a90f7375e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.148096] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Releasing lock "refresh_cache-c6acb8ba-7877-44c9-a78b-f15fc6d47b28" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.148601] env[61570]: DEBUG nova.compute.manager [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1242.148805] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1242.149327] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8eefcb26-8421-4957-a876-7ecf190c3df3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.161112] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1242.161370] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1242.161526] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1242.161707] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1242.161851] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1242.161994] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1242.162220] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1242.162381] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1242.162558] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1242.162707] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1242.162876] env[61570]: DEBUG nova.virt.hardware [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1242.165780] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad70389-a222-4d12-ad02-800e29069e20 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.179357] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b73a183-dc28-4f0e-b44d-193e239a1d0a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.191814] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d1e5e3-eb86-4ca8-9aa8-0ba7f2f15f40 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.205257] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 could not be found. [ 1242.205517] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1242.205657] env[61570]: INFO nova.compute.manager [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Took 0.06 seconds to destroy the instance on the hypervisor. 
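Note on the topology lines above: with the m1.nano flavor exposing a single vCPU and neither flavor nor image setting limits, the only admissible layout is 1 socket / 1 core / 1 thread, which is the single topology the log reports. As a rough illustration only (this is not Nova's actual _get_possible_cpu_topologies implementation, just the same factorization idea under assumed defaults):

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Illustrative sketch: enumerate (sockets, cores, threads) whose product equals the vCPU count.
    topologies = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            topologies.append((s, c, t))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- the one topology logged above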
[ 1242.205873] env[61570]: DEBUG oslo.service.loopingcall [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1242.206575] env[61570]: DEBUG nova.compute.manager [-] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1242.206719] env[61570]: DEBUG nova.network.neutron [-] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1242.373305] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a08940-082e-452c-a5b2-c52b1c5b2695 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.382215] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038fb536-6d0c-46d7-a052-fdabf06f7143 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.386182] env[61570]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1242.386182] env[61570]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1242.386753] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-763a4c96-7342-4530-b7f0-ecbb3c640dc9'] [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1242.386753] env[61570]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1242.387677] env[61570]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1242.387677] env[61570]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1242.388610] env[61570]: ERROR oslo.service.loopingcall [ 1242.389438] env[61570]: ERROR nova.compute.manager [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
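Context for the failure above: the 401 returned by Neutron means the service credentials Nova reads from the [neutron] section of nova.conf could not be authenticated by Keystone, so port cleanup for instance c6acb8ba-7877-44c9-a78b-f15fc6d47b28 aborts with NeutronAdminCredentialConfigurationInvalid, exactly as the earlier "please verify Neutron admin credential located in nova.conf" message suggests. A minimal, hypothetical way to exercise those credentials outside Nova (the endpoint, user, project, and password below are placeholders, not values taken from this deployment) might look like:

from keystoneauth1.identity import v3
from keystoneauth1 import session
from neutronclient.v2_0 import client

# Placeholder values; substitute whatever nova.conf [neutron] actually contains.
auth = v3.Password(auth_url='https://keystone.example.com/identity/v3',
                   username='nova', password='SERVICE_PASSWORD',
                   project_name='service',
                   user_domain_name='Default', project_domain_name='Default')
sess = session.Session(auth=auth)
neutron = client.Client(session=sess)

# A successful list_ports() call here rules out the 401 seen in the log above;
# a raised Unauthorized reproduces it with the same credentials.
print(len(neutron.list_ports()['ports']))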
[ 1242.418116] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf63848d-4889-4291-a142-432a79783218 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.426539] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f005c83-dddc-43e2-b1e7-b295b3dcd9b0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.443025] env[61570]: DEBUG nova.compute.provider_tree [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.446269] env[61570]: ERROR nova.compute.manager [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] exception_handler_v20(status_code, error_body) [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise client_exc(message=error_message, [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Neutron server returns request_ids: ['req-763a4c96-7342-4530-b7f0-ecbb3c640dc9'] [ 1242.446269] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During handling of the above exception, another exception occurred: [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Traceback (most recent call last): [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: 
c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._delete_instance(context, instance, bdms) [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._shutdown_instance(context, instance, bdms) [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._try_deallocate_network(context, instance, requested_networks) [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] with excutils.save_and_reraise_exception(): [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.446665] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self.force_reraise() [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise self.value [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] _deallocate_network_with_retries() [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return evt.wait() [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = hub.switch() [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.greenlet.switch() [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1242.447142] env[61570]: ERROR nova.compute.manager [instance: 
c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = func(*self.args, **self.kw) [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] result = f(*args, **kwargs) [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._deallocate_network( [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self.network_api.deallocate_for_instance( [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] data = neutron.list_ports(**search_opts) [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.list('ports', self.ports_path, retrieve_all, [ 1242.447477] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] for r in self._pagination(collection, path, **params): [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] res = self.get(path, params=params) [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.retry_request("GET", action, body=body, [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1242.447825] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] return self.do_request(method, action, body=body, [ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] ret = obj(*args, **kwargs) [ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] self._handle_fault_response(status_code, replybody, resp) [ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1242.448186] env[61570]: ERROR nova.compute.manager [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] [ 1242.456107] env[61570]: DEBUG nova.scheduler.client.report [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1242.476121] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.473s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.476338] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1242.499053] env[61570]: DEBUG oslo_concurrency.lockutils [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.718s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.499726] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 121.287s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.499962] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] During sync_power_state the instance has a pending task (deleting). Skip. 
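Aside on the inventory reported above for provider 829dc000-b508-440d-ae59-f7cfbca90113: Placement generally derives usable capacity as (total - reserved) * allocation_ratio per resource class. The arithmetic below simply applies that formula to the logged values; it is an illustration, not data returned by Placement:

# Illustrative arithmetic only, using the inventory values logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0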
[ 1242.500095] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "c6acb8ba-7877-44c9-a78b-f15fc6d47b28" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.558848] env[61570]: DEBUG nova.compute.utils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1242.562912] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1242.562912] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1242.578905] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1242.581720] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Successfully created port: 66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1242.623957] env[61570]: INFO nova.compute.manager [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] [instance: c6acb8ba-7877-44c9-a78b-f15fc6d47b28] Successfully reverted task state from None on failure for instance. [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server [None req-89d7e53f-b1eb-4202-82a8-a8318b79f86c tempest-ServerDiagnosticsTest-1795029508 tempest-ServerDiagnosticsTest-1795029508-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-763a4c96-7342-4530-b7f0-ecbb3c640dc9'] [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1242.627865] env[61570]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.628608] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1242.629409] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1242.630177] env[61570]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1242.630937] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.630937] env[61570]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1242.631675] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1242.632698] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1242.632698] env[61570]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1242.632698] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1242.632698] env[61570]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1242.632698] env[61570]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1242.632698] env[61570]: ERROR oslo_messaging.rpc.server [ 1242.661025] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1242.669291] env[61570]: DEBUG nova.policy [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab598bbe4274bbab3ca0f3ae8174bb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1453684a18b64487b99eedf9f842fd60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1242.691262] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1242.691558] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1242.691753] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1242.691956] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1242.692154] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1242.692315] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1242.692531] env[61570]: DEBUG nova.virt.hardware [None 
req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1242.692695] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1242.692887] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1242.693103] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1242.693317] env[61570]: DEBUG nova.virt.hardware [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1242.694309] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08baab91-1422-4e8e-be72-b1c86301da5b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.705281] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9546945a-b8fa-4871-bde7-2ccd966cf52a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.350659] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Successfully created port: 2c9da894-5198-47c7-8def-ed9c4d722b6a {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1244.267925] env[61570]: DEBUG nova.compute.manager [req-0b50db02-89cc-4c15-a991-97fafb6faa8f req-a92b0c9f-3487-4724-b260-a7d7c1ba878b service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] Received event network-vif-plugged-2c9da894-5198-47c7-8def-ed9c4d722b6a {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1244.270026] env[61570]: DEBUG oslo_concurrency.lockutils [req-0b50db02-89cc-4c15-a991-97fafb6faa8f req-a92b0c9f-3487-4724-b260-a7d7c1ba878b service nova] Acquiring lock "c69c8589-88e1-481e-87b8-55608322440c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.270026] env[61570]: DEBUG oslo_concurrency.lockutils [req-0b50db02-89cc-4c15-a991-97fafb6faa8f req-a92b0c9f-3487-4724-b260-a7d7c1ba878b service nova] Lock 
"c69c8589-88e1-481e-87b8-55608322440c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.270162] env[61570]: DEBUG oslo_concurrency.lockutils [req-0b50db02-89cc-4c15-a991-97fafb6faa8f req-a92b0c9f-3487-4724-b260-a7d7c1ba878b service nova] Lock "c69c8589-88e1-481e-87b8-55608322440c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.270523] env[61570]: DEBUG nova.compute.manager [req-0b50db02-89cc-4c15-a991-97fafb6faa8f req-a92b0c9f-3487-4724-b260-a7d7c1ba878b service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] No waiting events found dispatching network-vif-plugged-2c9da894-5198-47c7-8def-ed9c4d722b6a {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1244.270523] env[61570]: WARNING nova.compute.manager [req-0b50db02-89cc-4c15-a991-97fafb6faa8f req-a92b0c9f-3487-4724-b260-a7d7c1ba878b service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] Received unexpected event network-vif-plugged-2c9da894-5198-47c7-8def-ed9c4d722b6a for instance with vm_state building and task_state spawning. [ 1244.271433] env[61570]: DEBUG nova.compute.manager [req-0588c8f6-25ab-46f5-adf7-230510da8618 req-5345e701-a90c-427f-9178-3cacd591712a service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Received event network-vif-plugged-66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1244.271621] env[61570]: DEBUG oslo_concurrency.lockutils [req-0588c8f6-25ab-46f5-adf7-230510da8618 req-5345e701-a90c-427f-9178-3cacd591712a service nova] Acquiring lock "db38d263-aa3d-46b1-a13d-1469155fad84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.271804] env[61570]: DEBUG oslo_concurrency.lockutils [req-0588c8f6-25ab-46f5-adf7-230510da8618 req-5345e701-a90c-427f-9178-3cacd591712a service nova] Lock "db38d263-aa3d-46b1-a13d-1469155fad84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.271961] env[61570]: DEBUG oslo_concurrency.lockutils [req-0588c8f6-25ab-46f5-adf7-230510da8618 req-5345e701-a90c-427f-9178-3cacd591712a service nova] Lock "db38d263-aa3d-46b1-a13d-1469155fad84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.272144] env[61570]: DEBUG nova.compute.manager [req-0588c8f6-25ab-46f5-adf7-230510da8618 req-5345e701-a90c-427f-9178-3cacd591712a service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] No waiting events found dispatching network-vif-plugged-66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1244.272305] env[61570]: WARNING nova.compute.manager [req-0588c8f6-25ab-46f5-adf7-230510da8618 req-5345e701-a90c-427f-9178-3cacd591712a service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Received unexpected 
event network-vif-plugged-66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb for instance with vm_state building and task_state spawning. [ 1244.278426] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Successfully updated port: 2c9da894-5198-47c7-8def-ed9c4d722b6a {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1244.291599] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "refresh_cache-c69c8589-88e1-481e-87b8-55608322440c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.291774] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquired lock "refresh_cache-c69c8589-88e1-481e-87b8-55608322440c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.291885] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1244.343648] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Instance cache missing network info. 
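[editor's note] The warnings above ("Received unexpected event network-vif-plugged-...") occur because the event arrives while no waiter is registered for it, so pop_instance_event has nothing to dispatch. Below is a rough, self-contained sketch of that expected-event registry pattern; the class and method names are hypothetical and not Nova's implementation.

    # Sketch: events arriving with no registered waiter are reported as
    # unexpected instead of being dispatched.
    import threading
    from collections import defaultdict


    class InstanceEventRegistry:
        def __init__(self):
            self._lock = threading.Lock()
            # instance_uuid -> {event_name: threading.Event}
            self._waiters = defaultdict(dict)

        def prepare(self, instance_uuid, event_name):
            """Register interest in an event before triggering the action."""
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            """Dispatch an incoming event; None means nobody was waiting."""
            with self._lock:
                ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if ev is None:
                print(f"unexpected event {event_name} for {instance_uuid}")
                return None
            ev.set()
            return ev

[log continues]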
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1244.390585] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Successfully updated port: 66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1244.401322] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "refresh_cache-db38d263-aa3d-46b1-a13d-1469155fad84" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.401528] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquired lock "refresh_cache-db38d263-aa3d-46b1-a13d-1469155fad84" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.401658] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1244.512193] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1244.587565] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Updating instance_info_cache with network_info: [{"id": "2c9da894-5198-47c7-8def-ed9c4d722b6a", "address": "fa:16:3e:63:41:7a", "network": {"id": "02b0c070-e18f-47cb-9f1f-058927fad30e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-106495185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1453684a18b64487b99eedf9f842fd60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c9da894-51", "ovs_interfaceid": "2c9da894-5198-47c7-8def-ed9c4d722b6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.614073] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Releasing lock "refresh_cache-c69c8589-88e1-481e-87b8-55608322440c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.614654] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Instance network_info: |[{"id": "2c9da894-5198-47c7-8def-ed9c4d722b6a", "address": "fa:16:3e:63:41:7a", "network": {"id": "02b0c070-e18f-47cb-9f1f-058927fad30e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-106495185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1453684a18b64487b99eedf9f842fd60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c9da894-51", "ovs_interfaceid": "2c9da894-5198-47c7-8def-ed9c4d722b6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1244.615816] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:41:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c9da894-5198-47c7-8def-ed9c4d722b6a', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1244.626814] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Creating folder: Project (1453684a18b64487b99eedf9f842fd60). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1244.628676] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01aed56f-70ab-43dd-8043-56f18137c8b9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.640668] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Created folder: Project (1453684a18b64487b99eedf9f842fd60) in parent group-v953072. [ 1244.641394] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Creating folder: Instances. Parent ref: group-v953140. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1244.641759] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9809b774-3a97-4370-8d0b-cfb37d407821 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.651707] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Created folder: Instances in parent group-v953140. [ 1244.652579] env[61570]: DEBUG oslo.service.loopingcall [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1244.652579] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c69c8589-88e1-481e-87b8-55608322440c] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1244.652579] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cd422bd-6542-4616-b62d-4577baa02861 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.673966] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1244.673966] env[61570]: value = "task-4891365" [ 1244.673966] env[61570]: _type = "Task" [ 1244.673966] env[61570]: } to complete. 
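[editor's note] CreateVM_Task is submitted to vCenter and then polled until it finishes ("progress is 0%", later "completed successfully"). The loop below is a generic polling sketch, assuming a hypothetical get_task_info() callable that returns (state, progress); the real helper is oslo_vmware.api's wait_for_task, which differs in detail.

    # Minimal polling sketch for a vCenter-style asynchronous task.
    import time


    def wait_for_task(get_task_info, interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_task_info()
            print(f"task progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

[log continues]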
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.682857] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891365, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.099635] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Updating instance_info_cache with network_info: [{"id": "66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb", "address": "fa:16:3e:13:7a:66", "network": {"id": "e5ac84f7-3fa2-4a28-b283-497d6ef54733", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-812742832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9697eba07488413b9b05222af1a8e33d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66aa8a82-a3", "ovs_interfaceid": "66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.115657] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Releasing lock "refresh_cache-db38d263-aa3d-46b1-a13d-1469155fad84" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.116125] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Instance network_info: |[{"id": "66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb", "address": "fa:16:3e:13:7a:66", "network": {"id": "e5ac84f7-3fa2-4a28-b283-497d6ef54733", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-812742832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9697eba07488413b9b05222af1a8e33d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66aa8a82-a3", "ovs_interfaceid": 
"66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1245.116487] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:7a:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57c65f87-60fd-4882-ab30-31db49131b46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1245.124694] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Creating folder: Project (9697eba07488413b9b05222af1a8e33d). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1245.125315] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b49e3dd-6a37-4d43-97c7-0d4412a17a73 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.136478] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Created folder: Project (9697eba07488413b9b05222af1a8e33d) in parent group-v953072. [ 1245.136748] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Creating folder: Instances. Parent ref: group-v953143. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1245.136933] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4d1eeee-0210-4164-ac64-15614724dfc2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.147382] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Created folder: Instances in parent group-v953143. [ 1245.147382] env[61570]: DEBUG oslo.service.loopingcall [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1245.148535] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1245.148535] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0587553-33da-454a-80e1-abf6fb79685a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.167576] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1245.167576] env[61570]: value = "task-4891368" [ 1245.167576] env[61570]: _type = "Task" [ 1245.167576] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.176144] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891368, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.185068] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891365, 'name': CreateVM_Task, 'duration_secs': 0.364497} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.185068] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c69c8589-88e1-481e-87b8-55608322440c] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1245.185290] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.185331] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.185672] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1245.185923] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8345a97e-d2ae-4436-b253-fad5280aacea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.191214] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Waiting for the task: (returnval){ [ 1245.191214] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526a3ac0-f3e5-3649-5594-a0a179045e3c" [ 1245.191214] env[61570]: _type = "Task" [ 1245.191214] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.200681] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526a3ac0-f3e5-3649-5594-a0a179045e3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.683675] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891368, 'name': CreateVM_Task, 'duration_secs': 0.378699} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.683953] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1245.684763] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.702424] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.706035] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1245.706035] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.706035] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.706035] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1245.706335] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40bfe809-c80a-4050-9b74-6f7dfa2c8c46 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.709569] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Waiting for the task: (returnval){ [ 1245.709569] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5253a90c-f360-4a3f-5dab-087d3f68c286" [ 1245.709569] env[61570]: _type = "Task" [ 1245.709569] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.718273] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5253a90c-f360-4a3f-5dab-087d3f68c286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.226062] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.226324] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1246.226592] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.478064] env[61570]: DEBUG nova.compute.manager [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Received event network-changed-66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1246.478304] env[61570]: DEBUG nova.compute.manager [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Refreshing instance network info cache due to event network-changed-66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1246.478483] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] Acquiring lock "refresh_cache-db38d263-aa3d-46b1-a13d-1469155fad84" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.478623] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] Acquired lock "refresh_cache-db38d263-aa3d-46b1-a13d-1469155fad84" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.478804] env[61570]: DEBUG nova.network.neutron [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Refreshing network info cache for port 66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1246.587746] env[61570]: DEBUG nova.compute.manager [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] Received event network-changed-2c9da894-5198-47c7-8def-ed9c4d722b6a {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1246.587746] env[61570]: DEBUG nova.compute.manager [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] Refreshing instance network info cache due to event network-changed-2c9da894-5198-47c7-8def-ed9c4d722b6a. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1246.587746] env[61570]: DEBUG oslo_concurrency.lockutils [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] Acquiring lock "refresh_cache-c69c8589-88e1-481e-87b8-55608322440c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.590768] env[61570]: DEBUG oslo_concurrency.lockutils [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] Acquired lock "refresh_cache-c69c8589-88e1-481e-87b8-55608322440c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.590768] env[61570]: DEBUG nova.network.neutron [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] Refreshing network info cache for port 2c9da894-5198-47c7-8def-ed9c4d722b6a {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1246.868832] env[61570]: DEBUG nova.network.neutron [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Updated VIF entry in instance network info cache for port 66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb. 
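[editor's note] The network-changed events above trigger a refresh of the instance's cached network info while holding the per-instance "refresh_cache-<uuid>" lock. A simplified sketch of that pattern follows, using oslo.concurrency's lock context manager; the cache dict and the refresh_port_info() callable are stand-ins, not Nova's actual cache code.

    # Sketch: refresh one VIF entry in a per-instance cache under a named lock.
    from oslo_concurrency import lockutils

    instance_nw_cache = {}  # instance_uuid -> list of VIF dicts


    def handle_network_changed(instance_uuid, port_id, refresh_port_info):
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            vifs = instance_nw_cache.setdefault(instance_uuid, [])
            updated = refresh_port_info(port_id)  # e.g. a Neutron GET /ports/<id>
            for i, vif in enumerate(vifs):
                if vif["id"] == port_id:
                    vifs[i] = updated  # "Updated VIF entry" case in the log
                    break
            else:
                vifs.append(updated)

[log continues]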
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1246.869238] env[61570]: DEBUG nova.network.neutron [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Updating instance_info_cache with network_info: [{"id": "66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb", "address": "fa:16:3e:13:7a:66", "network": {"id": "e5ac84f7-3fa2-4a28-b283-497d6ef54733", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-812742832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9697eba07488413b9b05222af1a8e33d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66aa8a82-a3", "ovs_interfaceid": "66aa8a82-a3ab-46b7-8d69-e4aefffdb8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.886401] env[61570]: DEBUG oslo_concurrency.lockutils [req-1bb4628c-95dc-4a74-8d55-0628aea68222 req-77d80ee4-f549-43c5-8532-6f7cc514d273 service nova] Releasing lock "refresh_cache-db38d263-aa3d-46b1-a13d-1469155fad84" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.064041] env[61570]: DEBUG nova.network.neutron [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] Updated VIF entry in instance network info cache for port 2c9da894-5198-47c7-8def-ed9c4d722b6a. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1247.064400] env[61570]: DEBUG nova.network.neutron [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] [instance: c69c8589-88e1-481e-87b8-55608322440c] Updating instance_info_cache with network_info: [{"id": "2c9da894-5198-47c7-8def-ed9c4d722b6a", "address": "fa:16:3e:63:41:7a", "network": {"id": "02b0c070-e18f-47cb-9f1f-058927fad30e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-106495185-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1453684a18b64487b99eedf9f842fd60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c9da894-51", "ovs_interfaceid": "2c9da894-5198-47c7-8def-ed9c4d722b6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.078189] env[61570]: DEBUG oslo_concurrency.lockutils [req-52717c4c-3cfe-4f54-b283-0aff69fcde31 req-61f8f680-6b72-4635-9bf7-6a455a0c948a service nova] Releasing lock "refresh_cache-c69c8589-88e1-481e-87b8-55608322440c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.476202] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "04741959-c2c4-4b38-92e7-43f941818775" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.476710] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "04741959-c2c4-4b38-92e7-43f941818775" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.510232] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "9711e15c-ef00-47c4-afc3-b5fb93277c63" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.510232] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] 
Lock "9711e15c-ef00-47c4-afc3-b5fb93277c63" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.753927] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.754332] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1282.752847] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1282.765744] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.766091] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.766179] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.766294] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1282.767869] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccc7520-43a3-41ef-8c60-a5ed9e32f0f6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.776959] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63daa94-3a3a-49d4-9721-4f935e507c80 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.792177] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29dc3d57-0a4c-4b70-b877-2589764cc57d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.799319] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e19a23e-6528-4a70-9367-60c2a3187c99 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.830979] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180562MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1282.831171] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.831381] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.914250] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.914470] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance df50c085-3eee-44c2-8d14-263f3bf49b2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.914601] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.914723] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.914873] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.914975] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.915141] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.915212] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.915325] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.915405] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1282.927415] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1282.938858] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1282.949708] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1282.962807] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8fa50d72-12ee-4345-9ad7-07896fe34776 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1282.974641] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ea7cddf9-4529-4716-bc4e-8490e3f5ef83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1282.984312] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 3dec2bfa-16bf-423d-9ce3-16da3d9e9397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1282.994729] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 23632a70-aec8-44aa-aa56-8ebe2b91840e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.005828] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.016892] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9711e15c-ef00-47c4-afc3-b5fb93277c63 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1283.017175] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1283.017341] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '56', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_bb2aa2a9c3af4e059ab13f940dbf497a': '1', 'io_workload': '10', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '2', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_eccce396bddd42cc931ff5c0e6850b49': '1', 'num_task_spawning': '2', 'num_proj_9697eba07488413b9b05222af1a8e33d': '1', 'num_proj_1453684a18b64487b99eedf9f842fd60': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1283.253027] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee15ca4d-ab06-4c09-8eb9-15b79f84495f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.260624] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77917e47-af92-4b66-8f88-b6514b680247 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.292799] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3d7f75-bca6-4e87-b8b2-24a1b86b3334 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.300961] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d066e74-6b3b-40dd-ae5e-8f202c017401 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.315384] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.325883] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.340691] env[61570]: DEBUG nova.compute.resource_tracker [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1283.340892] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.509s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.340882] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.341108] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1285.341161] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1285.362196] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.362374] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.362473] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.362602] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.362724] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.362843] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.362962] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.363094] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.363211] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.363325] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c69c8589-88e1-481e-87b8-55608322440c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1285.363443] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1285.753241] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.753468] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.753666] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.754090] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.754413] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.167342] env[61570]: WARNING oslo_vmware.rw_handles [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, 
in getresponse [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1289.167342] env[61570]: ERROR oslo_vmware.rw_handles [ 1289.167974] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1289.169550] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1289.169791] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Copying Virtual Disk [datastore2] vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/6a022536-46d2-4a9c-affe-717deb375fda/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1289.170093] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93c2f233-3447-4804-984e-dea72daa8aad {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.178307] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Waiting for the task: (returnval){ [ 1289.178307] env[61570]: value = "task-4891369" [ 1289.178307] env[61570]: _type = "Task" [ 1289.178307] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.186738] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Task: {'id': task-4891369, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.688711] env[61570]: DEBUG oslo_vmware.exceptions [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1289.689041] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.689617] env[61570]: ERROR nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1289.689617] env[61570]: Faults: ['InvalidArgument'] [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Traceback (most recent call last): [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] yield resources [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self.driver.spawn(context, instance, image_meta, [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self._fetch_image_if_missing(context, vi) [ 1289.689617] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] image_cache(vi, tmp_image_ds_loc) [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] vm_util.copy_virtual_disk( [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] session._wait_for_task(vmdk_copy_task) [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] return self.wait_for_task(task_ref) [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] return evt.wait() [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] result = hub.switch() [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1289.690087] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] return self.greenlet.switch() [ 1289.690577] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1289.690577] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self.f(*self.args, **self.kw) [ 1289.690577] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1289.690577] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] raise exceptions.translate_fault(task_info.error) [ 1289.690577] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1289.690577] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Faults: ['InvalidArgument'] [ 1289.690577] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] [ 1289.690577] env[61570]: INFO nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Terminating instance [ 1289.691578] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.691796] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 
tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1289.692091] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55c6689e-6983-4807-a891-6d58c10a0b45 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.694517] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1289.694718] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1289.695518] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad0e491-9826-4bca-a62f-7004650745fb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.706428] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1289.706428] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bedb2e1-1da1-4a54-a55d-b42b72e9bd46 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.708157] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1289.708282] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1289.708989] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3209407c-202f-435b-80e9-b3814b3561d8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.715518] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 1289.715518] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52745ba3-f48e-2fc8-2ee4-71d89a0c5622" [ 1289.715518] env[61570]: _type = "Task" [ 1289.715518] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.724390] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52745ba3-f48e-2fc8-2ee4-71d89a0c5622, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.782247] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1289.782494] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1289.782672] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Deleting the datastore file [datastore2] e4f4573c-040a-49d6-ba20-e051a265b3e4 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.782946] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a74a2f18-d7fb-4f52-b3a1-71330e402048 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.790119] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Waiting for the task: (returnval){ [ 1289.790119] env[61570]: value = "task-4891371" [ 1289.790119] env[61570]: _type = "Task" [ 1289.790119] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.798759] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Task: {'id': task-4891371, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.939862] env[61570]: DEBUG oslo_concurrency.lockutils [None req-df91f14b-7087-4aa6-af20-04d1bfd682b8 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "7cb72b52-c3e2-46ea-8874-8832defc02ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.940167] env[61570]: DEBUG oslo_concurrency.lockutils [None req-df91f14b-7087-4aa6-af20-04d1bfd682b8 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "7cb72b52-c3e2-46ea-8874-8832defc02ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.227399] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1290.227668] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating directory with path [datastore2] vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1290.227861] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b630f48-944e-4f2c-aff0-9047b2996d56 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.240578] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Created directory with path [datastore2] vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1290.240793] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Fetch image to [datastore2] vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1290.241035] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 
1290.241855] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b772bd08-dfa5-4d8a-b1ea-efe6b994a6f4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.249929] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb67938-2abf-4b83-8837-83abbb0141d3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.259933] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bba0ce-4e3a-4b33-bd20-e8ae615f4d98 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.295573] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9588f04-cbf7-4035-9579-3e2d961afe74 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.305996] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-457a29a9-003b-4c8c-a2a8-e8faee23cede {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.308145] env[61570]: DEBUG oslo_vmware.api [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Task: {'id': task-4891371, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083403} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.308423] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1290.308603] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1290.308800] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1290.308968] env[61570]: INFO nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1290.311842] env[61570]: DEBUG nova.compute.claims [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1290.312061] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.312355] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.332570] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1290.390967] env[61570]: DEBUG oslo_vmware.rw_handles [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1290.451436] env[61570]: DEBUG oslo_vmware.rw_handles [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1290.451671] env[61570]: DEBUG oslo_vmware.rw_handles [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1290.710858] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86252264-4b20-41b1-9132-2bff020a0633 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.719790] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa225a87-d02d-4936-90bc-5873dbb09ced {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.752106] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.753233] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca2cde0-2488-4b4f-9b8a-d01959ea1dad {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.761754] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0159544a-02bf-4386-8eea-13150d21fbff {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.777086] env[61570]: DEBUG nova.compute.provider_tree [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.787525] env[61570]: DEBUG nova.scheduler.client.report [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1290.806037] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.493s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.806037] env[61570]: ERROR nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1290.806037] env[61570]: Faults: ['InvalidArgument'] [ 1290.806037] env[61570]: ERROR 
nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Traceback (most recent call last): [ 1290.806037] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1290.806037] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self.driver.spawn(context, instance, image_meta, [ 1290.806037] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1290.806037] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1290.806037] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1290.806037] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self._fetch_image_if_missing(context, vi) [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] image_cache(vi, tmp_image_ds_loc) [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] vm_util.copy_virtual_disk( [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] session._wait_for_task(vmdk_copy_task) [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] return self.wait_for_task(task_ref) [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] return evt.wait() [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] result = hub.switch() [ 1290.806460] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] return self.greenlet.switch() [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] self.f(*self.args, **self.kw) [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] raise exceptions.translate_fault(task_info.error) [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Faults: ['InvalidArgument'] [ 1290.806842] env[61570]: ERROR nova.compute.manager [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] [ 1290.806842] env[61570]: DEBUG nova.compute.utils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1290.808743] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Build of instance e4f4573c-040a-49d6-ba20-e051a265b3e4 was re-scheduled: A specified parameter was not correct: fileType [ 1290.808743] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1290.809123] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1290.809376] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1290.809600] env[61570]: DEBUG nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1290.809803] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1291.332423] env[61570]: DEBUG nova.network.neutron [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.345347] env[61570]: INFO nova.compute.manager [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Took 0.54 seconds to deallocate network for instance. [ 1291.453555] env[61570]: INFO nova.scheduler.client.report [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Deleted allocations for instance e4f4573c-040a-49d6-ba20-e051a265b3e4 [ 1291.479888] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e7e76ba8-a6cf-49c6-a6fb-cb7398d577d8 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.598s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.481097] env[61570]: DEBUG oslo_concurrency.lockutils [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.886s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.481344] env[61570]: DEBUG oslo_concurrency.lockutils [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Acquiring lock "e4f4573c-040a-49d6-ba20-e051a265b3e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.481550] env[61570]: DEBUG oslo_concurrency.lockutils [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.481715] env[61570]: DEBUG oslo_concurrency.lockutils [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.485388] env[61570]: INFO nova.compute.manager [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Terminating instance [ 1291.486125] env[61570]: DEBUG nova.compute.manager [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1291.486330] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1291.486824] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6477a2f-c182-4ba1-b132-262ad2b62113 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.498146] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981c62f6-624f-452f-a66d-9f87d4337286 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.509876] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1291.535621] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e4f4573c-040a-49d6-ba20-e051a265b3e4 could not be found. 
[ 1291.535875] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1291.536137] env[61570]: INFO nova.compute.manager [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1291.536356] env[61570]: DEBUG oslo.service.loopingcall [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1291.536531] env[61570]: DEBUG nova.compute.manager [-] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1291.536626] env[61570]: DEBUG nova.network.neutron [-] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1291.569778] env[61570]: DEBUG nova.network.neutron [-] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.572097] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.572463] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.574257] env[61570]: INFO nova.compute.claims [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1291.578405] env[61570]: INFO nova.compute.manager [-] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] Took 0.04 seconds to deallocate network for instance. 
[ 1291.700072] env[61570]: DEBUG oslo_concurrency.lockutils [None req-23f1fea0-c465-4218-ba40-a53eaa7bc213 tempest-VolumesAdminNegativeTest-808236367 tempest-VolumesAdminNegativeTest-808236367-project-member] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.219s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.704127] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 170.491s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.704299] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: e4f4573c-040a-49d6-ba20-e051a265b3e4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1291.704505] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "e4f4573c-040a-49d6-ba20-e051a265b3e4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.906389] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d716de05-fe9e-4604-836a-2ed422b7481d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.914290] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250d249d-97f8-425a-ae49-5d4c936a8bf9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.943901] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df061200-6513-4b7e-850f-3a61616a0c6c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.952166] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ec1b62-8189-4b57-a139-69be311ba9d5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.967138] env[61570]: DEBUG nova.compute.provider_tree [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.976195] env[61570]: DEBUG nova.scheduler.client.report [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 
'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1291.990286] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.418s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.003586] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "4dbef093-de7c-4af6-9234-6deeedd2c27d" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.003817] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "4dbef093-de7c-4af6-9234-6deeedd2c27d" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.009382] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "4dbef093-de7c-4af6-9234-6deeedd2c27d" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.005s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.009882] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1292.041992] env[61570]: DEBUG nova.compute.utils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1292.043458] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Allocating IP information in the background. 
{{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1292.043785] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1292.052636] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1292.107417] env[61570]: DEBUG nova.policy [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc5979d6f3764ab093d7c30d638b1e1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '010349794e9d4aaf8248bc8855c58453', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1292.115497] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1292.141751] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1292.141997] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1292.142170] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1292.142353] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1292.142496] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1292.142638] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1292.142837] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1292.142989] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1292.143165] env[61570]: DEBUG nova.virt.hardware [None 
req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1292.143325] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1292.143494] env[61570]: DEBUG nova.virt.hardware [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1292.144372] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4139966c-4201-4f56-89cc-6078291125d8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.152837] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ab7d1f-df19-4d49-b05a-ac26155435a1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.487469] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Successfully created port: 9957b7bb-8eb2-4653-be24-a91f0d82d162 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1293.409508] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Successfully updated port: 9957b7bb-8eb2-4653-be24-a91f0d82d162 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1293.418253] env[61570]: DEBUG nova.compute.manager [req-9a198fd8-a403-41f0-a3ad-352f6221d03f req-ecb22056-d615-414b-ae93-04368ccd24cc service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Received event network-vif-plugged-9957b7bb-8eb2-4653-be24-a91f0d82d162 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1293.418253] env[61570]: DEBUG oslo_concurrency.lockutils [req-9a198fd8-a403-41f0-a3ad-352f6221d03f req-ecb22056-d615-414b-ae93-04368ccd24cc service nova] Acquiring lock "5f5232f8-60f4-472f-ab6e-6273904481e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.418253] env[61570]: DEBUG oslo_concurrency.lockutils [req-9a198fd8-a403-41f0-a3ad-352f6221d03f req-ecb22056-d615-414b-ae93-04368ccd24cc service nova] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.418253] env[61570]: DEBUG oslo_concurrency.lockutils [req-9a198fd8-a403-41f0-a3ad-352f6221d03f 
req-ecb22056-d615-414b-ae93-04368ccd24cc service nova] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.418465] env[61570]: DEBUG nova.compute.manager [req-9a198fd8-a403-41f0-a3ad-352f6221d03f req-ecb22056-d615-414b-ae93-04368ccd24cc service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] No waiting events found dispatching network-vif-plugged-9957b7bb-8eb2-4653-be24-a91f0d82d162 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1293.418465] env[61570]: WARNING nova.compute.manager [req-9a198fd8-a403-41f0-a3ad-352f6221d03f req-ecb22056-d615-414b-ae93-04368ccd24cc service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Received unexpected event network-vif-plugged-9957b7bb-8eb2-4653-be24-a91f0d82d162 for instance with vm_state building and task_state spawning. [ 1293.425398] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "refresh_cache-5f5232f8-60f4-472f-ab6e-6273904481e8" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.425398] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquired lock "refresh_cache-5f5232f8-60f4-472f-ab6e-6273904481e8" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.425398] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1293.496129] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1293.789961] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Updating instance_info_cache with network_info: [{"id": "9957b7bb-8eb2-4653-be24-a91f0d82d162", "address": "fa:16:3e:4f:81:bf", "network": {"id": "406cbcaf-f50a-4e88-9f9d-ab02c5cd8345", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-811728635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "010349794e9d4aaf8248bc8855c58453", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3202e2b9-c4a7-4f78-9476-12ed92fabe61", "external-id": "nsx-vlan-transportzone-846", "segmentation_id": 846, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9957b7bb-8e", "ovs_interfaceid": "9957b7bb-8eb2-4653-be24-a91f0d82d162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.800793] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Releasing lock "refresh_cache-5f5232f8-60f4-472f-ab6e-6273904481e8" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.801114] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Instance network_info: |[{"id": "9957b7bb-8eb2-4653-be24-a91f0d82d162", "address": "fa:16:3e:4f:81:bf", "network": {"id": "406cbcaf-f50a-4e88-9f9d-ab02c5cd8345", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-811728635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "010349794e9d4aaf8248bc8855c58453", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3202e2b9-c4a7-4f78-9476-12ed92fabe61", "external-id": "nsx-vlan-transportzone-846", "segmentation_id": 846, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9957b7bb-8e", "ovs_interfaceid": "9957b7bb-8eb2-4653-be24-a91f0d82d162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1293.801541] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:81:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3202e2b9-c4a7-4f78-9476-12ed92fabe61', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9957b7bb-8eb2-4653-be24-a91f0d82d162', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1293.809201] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Creating folder: Project (010349794e9d4aaf8248bc8855c58453). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1293.809834] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f75bfcd-2b26-4b19-94b6-bd4fe8bcee17 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.821119] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Created folder: Project (010349794e9d4aaf8248bc8855c58453) in parent group-v953072. [ 1293.821338] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Creating folder: Instances. Parent ref: group-v953146. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1293.821581] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84437894-7fc4-4cb4-ba04-ed0774d3f470 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.833285] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Created folder: Instances in parent group-v953146. [ 1293.833570] env[61570]: DEBUG oslo.service.loopingcall [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.833781] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1293.833998] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86c0720a-f9c3-4f91-b716-8648e79223b4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.855687] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1293.855687] env[61570]: value = "task-4891374" [ 1293.855687] env[61570]: _type = "Task" [ 1293.855687] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.864368] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891374, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.369127] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891374, 'name': CreateVM_Task, 'duration_secs': 0.312653} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.369376] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1294.370064] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.370248] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.370582] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1294.370842] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f9f635d-7c7e-4bde-a6ab-5a79bfe5b67c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.376025] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Waiting for the task: (returnval){ [ 1294.376025] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526047e3-6c46-90e2-28ec-0d8727545e1b" [ 1294.376025] env[61570]: _type = "Task" [ 1294.376025] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.384312] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526047e3-6c46-90e2-28ec-0d8727545e1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.887305] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.887626] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1294.887779] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.448553] env[61570]: DEBUG nova.compute.manager [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Received event network-changed-9957b7bb-8eb2-4653-be24-a91f0d82d162 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1295.448760] env[61570]: DEBUG nova.compute.manager [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Refreshing instance network info cache due to event network-changed-9957b7bb-8eb2-4653-be24-a91f0d82d162. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1295.448983] env[61570]: DEBUG oslo_concurrency.lockutils [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] Acquiring lock "refresh_cache-5f5232f8-60f4-472f-ab6e-6273904481e8" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.449143] env[61570]: DEBUG oslo_concurrency.lockutils [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] Acquired lock "refresh_cache-5f5232f8-60f4-472f-ab6e-6273904481e8" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.449306] env[61570]: DEBUG nova.network.neutron [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Refreshing network info cache for port 9957b7bb-8eb2-4653-be24-a91f0d82d162 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1295.760543] env[61570]: DEBUG nova.network.neutron [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Updated VIF entry in instance network info cache for port 9957b7bb-8eb2-4653-be24-a91f0d82d162. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1295.760856] env[61570]: DEBUG nova.network.neutron [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Updating instance_info_cache with network_info: [{"id": "9957b7bb-8eb2-4653-be24-a91f0d82d162", "address": "fa:16:3e:4f:81:bf", "network": {"id": "406cbcaf-f50a-4e88-9f9d-ab02c5cd8345", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-811728635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "010349794e9d4aaf8248bc8855c58453", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3202e2b9-c4a7-4f78-9476-12ed92fabe61", "external-id": "nsx-vlan-transportzone-846", "segmentation_id": 846, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9957b7bb-8e", "ovs_interfaceid": "9957b7bb-8eb2-4653-be24-a91f0d82d162", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.773316] env[61570]: DEBUG oslo_concurrency.lockutils [req-66fe3f80-554b-423b-b631-aae0206043fc req-0fd9aa65-f9a2-4b39-86e9-dae52fe03c7b service nova] Releasing lock "refresh_cache-5f5232f8-60f4-472f-ab6e-6273904481e8" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.186597] env[61570]: WARNING oslo_vmware.rw_handles [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1339.186597] env[61570]: ERROR oslo_vmware.rw_handles [ 1339.187240] env[61570]: DEBUG nova.virt.vmwareapi.images [None 
req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1339.189586] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1339.189929] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Copying Virtual Disk [datastore2] vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/f1fe3e88-0cef-40bd-ba5c-49af3cdcda2e/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1339.190235] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ebab4ac-808a-402a-b3b0-19e866105616 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.199045] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 1339.199045] env[61570]: value = "task-4891375" [ 1339.199045] env[61570]: _type = "Task" [ 1339.199045] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.207620] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': task-4891375, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.709745] env[61570]: DEBUG oslo_vmware.exceptions [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1339.710121] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1339.710747] env[61570]: ERROR nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1339.710747] env[61570]: Faults: ['InvalidArgument'] [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Traceback (most recent call last): [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] yield resources [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self.driver.spawn(context, instance, image_meta, [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self._fetch_image_if_missing(context, vi) [ 1339.710747] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] image_cache(vi, tmp_image_ds_loc) [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] vm_util.copy_virtual_disk( [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] session._wait_for_task(vmdk_copy_task) [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] return self.wait_for_task(task_ref) [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] return evt.wait() [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] result = hub.switch() [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1339.711047] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] return self.greenlet.switch() [ 1339.711358] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1339.711358] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self.f(*self.args, **self.kw) [ 1339.711358] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1339.711358] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] raise exceptions.translate_fault(task_info.error) [ 1339.711358] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1339.711358] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Faults: ['InvalidArgument'] [ 1339.711358] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] [ 1339.711358] env[61570]: INFO nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Terminating instance [ 1339.712793] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.713017] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1339.713267] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-8be53d2b-ee4c-493a-a674-316296867de2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.715724] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1339.715916] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1339.716662] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f794fd01-787c-4767-84eb-d0791564ee7c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.723760] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1339.724029] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f37c04e5-7cc4-4c5d-a2df-16a9e4bca4d3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.726394] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1339.726571] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1339.727599] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcbec887-0eec-4045-aa6b-bc7a1b35a13a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.733178] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 1339.733178] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52bb5c29-7837-94a1-7472-9f56071ff9a3" [ 1339.733178] env[61570]: _type = "Task" [ 1339.733178] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.744373] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52bb5c29-7837-94a1-7472-9f56071ff9a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.793732] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1339.794162] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1339.794428] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Deleting the datastore file [datastore2] df50c085-3eee-44c2-8d14-263f3bf49b2d {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1339.794717] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a8d3d57-7c71-4c63-9af4-bac7a2a0bf8e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.801296] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 1339.801296] env[61570]: value = "task-4891377" [ 1339.801296] env[61570]: _type = "Task" [ 1339.801296] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.809716] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': task-4891377, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.245845] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1340.246163] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating directory with path [datastore2] vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1340.246431] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92202a58-5664-4527-91c0-4eaf67cef626 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.258946] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Created directory with path [datastore2] vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1340.259158] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Fetch image to [datastore2] vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1340.259330] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1340.260226] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49b95d1-533a-4c6f-865a-7f8672ebaf06 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.268211] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db14283e-4b71-4745-a38b-8a52bba6747d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.278117] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586b6c9b-1de4-42e0-8651-1914a780a6e8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.311893] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-914acbf2-df77-45ab-8dc4-7a1955e9b6b3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.319498] env[61570]: DEBUG oslo_vmware.api [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': task-4891377, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078684} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.320986] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.321199] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1340.321382] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1340.321549] env[61570]: INFO nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1340.323402] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-47ca4663-83b4-434a-a3e1-7ad7cee10db1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.325339] env[61570]: DEBUG nova.compute.claims [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1340.325518] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.325765] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.354091] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1340.413901] env[61570]: DEBUG oslo_vmware.rw_handles [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1340.476575] env[61570]: DEBUG oslo_vmware.rw_handles [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1340.476760] env[61570]: DEBUG oslo_vmware.rw_handles [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1340.681967] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cbc030-8f7d-4c87-874d-b03c9f3e6fa4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.689948] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1498c13-5867-42b8-b82b-1b26d2d73534 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.720472] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e67d443-8122-42ce-8cb9-58f130e6c8f9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.727515] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d09696-55cc-4407-bbef-4ccacc706613 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.740959] env[61570]: DEBUG nova.compute.provider_tree [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1340.750234] env[61570]: DEBUG nova.scheduler.client.report [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1340.766948] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.441s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.767589] env[61570]: ERROR nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1340.767589] env[61570]: Faults: ['InvalidArgument'] [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Traceback (most recent call last): [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1340.767589] 
env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self.driver.spawn(context, instance, image_meta, [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self._fetch_image_if_missing(context, vi) [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] image_cache(vi, tmp_image_ds_loc) [ 1340.767589] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] vm_util.copy_virtual_disk( [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] session._wait_for_task(vmdk_copy_task) [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] return self.wait_for_task(task_ref) [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] return evt.wait() [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] result = hub.switch() [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] return self.greenlet.switch() [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1340.768060] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] self.f(*self.args, **self.kw) [ 1340.768509] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1340.768509] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] raise exceptions.translate_fault(task_info.error) [ 1340.768509] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1340.768509] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Faults: ['InvalidArgument'] [ 1340.768509] env[61570]: ERROR nova.compute.manager [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] [ 1340.768509] env[61570]: DEBUG nova.compute.utils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1340.769965] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Build of instance df50c085-3eee-44c2-8d14-263f3bf49b2d was re-scheduled: A specified parameter was not correct: fileType [ 1340.769965] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1340.770365] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1340.770539] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1340.770695] env[61570]: DEBUG nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1340.770854] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1341.212999] env[61570]: DEBUG nova.network.neutron [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.235421] env[61570]: INFO nova.compute.manager [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Took 0.46 seconds to deallocate network for instance. [ 1341.342944] env[61570]: INFO nova.scheduler.client.report [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Deleted allocations for instance df50c085-3eee-44c2-8d14-263f3bf49b2d [ 1341.365192] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cda6ff51-c9a3-48ca-8d96-a21bf5d2f48d tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 585.548s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.367011] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 220.154s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.367366] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] During sync_power_state the instance has a pending task (spawning). Skip. 
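The traceback above shows the build of instance df50c085-3eee-44c2-8d14-263f3bf49b2d failing in the image-cache path: vm_util.copy_virtual_disk() starts a CopyVirtualDisk_Task on vCenter, the task finishes in the error state, and oslo.vmware's task poller translates that into VimFaultException with fault_list ['InvalidArgument'] ("A specified parameter was not correct: fileType"); the compute manager then aborts the resource claim, deletes the placement allocation, and re-schedules the build. A minimal sketch of that call shape using public oslo.vmware calls follows; the wrapper function, its arguments, and the argument plumbing are illustrative assumptions, not Nova's actual code.

    # Illustrative sketch only: mirrors the CopyVirtualDisk step from the
    # traceback above; assumes a configured oslo_vmware.api.VMwareAPISession.
    from oslo_vmware import exceptions as vexc

    def copy_cached_image(session, dc_ref, source_vmdk, dest_vmdk):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # CopyVirtualDisk_Task is the vSphere API behind vm_util.copy_virtual_disk().
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=source_vmdk, sourceDatacenter=dc_ref,
                                  destName=dest_vmdk, destDatacenter=dc_ref)
        try:
            # wait_for_task() polls the task and raises VimFaultException when
            # the task ends in the 'error' state, as it does in the log above.
            session.wait_for_task(task)
        except vexc.VimFaultException:
            # Here exc.fault_list would be ['InvalidArgument'] with the message
            # "A specified parameter was not correct: fileType"; the compute
            # manager reacts by aborting the claim and re-scheduling the build.
            raise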
[ 1341.367620] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.368379] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 192.718s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.368561] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "df50c085-3eee-44c2-8d14-263f3bf49b2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.368664] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.368841] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.370756] env[61570]: INFO nova.compute.manager [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Terminating instance [ 1341.372593] env[61570]: DEBUG nova.compute.manager [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1341.372815] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1341.373403] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3048369a-3485-419a-a3a4-8e8ea07ce0bd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.384089] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df49d1d1-35a5-41ec-8fb3-76caf897d238 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.405404] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1341.421520] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance df50c085-3eee-44c2-8d14-263f3bf49b2d could not be found. [ 1341.421722] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1341.421894] env[61570]: INFO nova.compute.manager [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1341.422155] env[61570]: DEBUG oslo.service.loopingcall [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1341.422388] env[61570]: DEBUG nova.compute.manager [-] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1341.422487] env[61570]: DEBUG nova.network.neutron [-] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1341.451439] env[61570]: DEBUG nova.network.neutron [-] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.458969] env[61570]: INFO nova.compute.manager [-] [instance: df50c085-3eee-44c2-8d14-263f3bf49b2d] Took 0.04 seconds to deallocate network for instance. [ 1341.464471] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.464471] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.465903] env[61570]: INFO nova.compute.claims [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1341.547017] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0c5133eb-c49d-4c7b-a071-c56f68f28ca1 tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "df50c085-3eee-44c2-8d14-263f3bf49b2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.753167] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.753357] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1341.777998] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8c675d-2cf4-489d-a303-7397468f4f72 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.785864] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6562a64f-a6b2-4570-a6aa-c05968de0d18 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.815816] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae320e2-8d39-43a3-a1d2-2e8cb5b9b28b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.823635] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f55337-3365-4dae-8f66-5d64270360f4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.837373] env[61570]: DEBUG nova.compute.provider_tree [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.846855] env[61570]: DEBUG nova.scheduler.client.report [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1341.863700] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.399s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.864254] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1341.905962] env[61570]: DEBUG nova.compute.utils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1341.907280] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1341.907485] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1341.916557] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1341.972819] env[61570]: DEBUG nova.policy [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffd73f0e33ee403c9cf442a4b6177e94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0374f79fe6d4946a64c2acc369178cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1341.983437] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1342.009143] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1342.009401] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1342.009557] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1342.009738] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1342.009882] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1342.010036] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1342.010246] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1342.010408] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1342.010574] env[61570]: DEBUG nova.virt.hardware [None 
req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1342.010739] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1342.010911] env[61570]: DEBUG nova.virt.hardware [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1342.011779] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6965f8f-6dd2-46c5-92d2-d83ad643904d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.021613] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854d287e-58d3-42ca-883f-1c3bda6b6860 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.489822] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Successfully created port: fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1343.615810] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Successfully updated port: fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.634552] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "refresh_cache-f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.634708] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired lock "refresh_cache-f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.636746] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1343.718072] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 
tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1343.723217] env[61570]: DEBUG nova.compute.manager [req-f03a9f3b-1ce9-4e29-9484-9492e0277150 req-daa21a1c-3ffd-457c-997a-bab031206293 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Received event network-vif-plugged-fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1343.723468] env[61570]: DEBUG oslo_concurrency.lockutils [req-f03a9f3b-1ce9-4e29-9484-9492e0277150 req-daa21a1c-3ffd-457c-997a-bab031206293 service nova] Acquiring lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.723778] env[61570]: DEBUG oslo_concurrency.lockutils [req-f03a9f3b-1ce9-4e29-9484-9492e0277150 req-daa21a1c-3ffd-457c-997a-bab031206293 service nova] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.723914] env[61570]: DEBUG oslo_concurrency.lockutils [req-f03a9f3b-1ce9-4e29-9484-9492e0277150 req-daa21a1c-3ffd-457c-997a-bab031206293 service nova] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.724107] env[61570]: DEBUG nova.compute.manager [req-f03a9f3b-1ce9-4e29-9484-9492e0277150 req-daa21a1c-3ffd-457c-997a-bab031206293 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] No waiting events found dispatching network-vif-plugged-fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1343.724296] env[61570]: WARNING nova.compute.manager [req-f03a9f3b-1ce9-4e29-9484-9492e0277150 req-daa21a1c-3ffd-457c-997a-bab031206293 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Received unexpected event network-vif-plugged-fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca for instance with vm_state building and task_state spawning. 
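The network-vif-plugged event logged just above arrives from Neutron through Nova's os-server-external-events API once port fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca becomes active; because the instance is still building and no waiter has been registered for the event yet, the compute manager logs it as "unexpected" and effectively ignores it. A minimal sketch of the notification call is below; nova_url, token, and the helper name are placeholders, not values from this log.

    import requests

    def send_vif_plugged(nova_url, token, server_uuid, port_id):
        # Payload shape of the os-server-external-events API used by Neutron's
        # Nova notifier; "tag" carries the port ID the event refers to.
        body = {"events": [{"name": "network-vif-plugged",
                            "server_uuid": server_uuid,
                            "tag": port_id,
                            "status": "completed"}]}
        resp = requests.post(nova_url + "/os-server-external-events",
                             json=body,
                             headers={"X-Auth-Token": token,
                                      "Content-Type": "application/json"})
        resp.raise_for_status()
        return resp.json()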
[ 1343.752816] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.770965] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.771294] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.771506] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.771670] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1343.778458] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba5a14a-4efd-4942-93e6-28962a9459bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.789621] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57371477-874d-47b2-ab9b-0d11e9ac6acc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.814710] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887efd2a-5c3e-4d40-bcee-eec97a5b8ed7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.824422] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baf39fe-952f-47d2-8daa-11036c920c38 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.858865] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180594MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1343.859043] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.859257] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.968431] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.968751] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.968942] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.969148] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.969327] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.969496] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.969666] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.969831] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.969995] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.970181] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1343.984591] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.000901] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8fa50d72-12ee-4345-9ad7-07896fe34776 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.019413] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ea7cddf9-4529-4716-bc4e-8490e3f5ef83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.039070] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 3dec2bfa-16bf-423d-9ce3-16da3d9e9397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.063265] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Updating instance_info_cache with network_info: [{"id": "fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca", "address": "fa:16:3e:30:53:4f", "network": {"id": "5416d54d-e8a3-4aa1-8443-b47ba7b911ae", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1066830962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0374f79fe6d4946a64c2acc369178cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb2c1db1-88", "ovs_interfaceid": "fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.082291] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 23632a70-aec8-44aa-aa56-8ebe2b91840e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.082291] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Releasing lock "refresh_cache-f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.082668] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Instance network_info: |[{"id": "fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca", "address": "fa:16:3e:30:53:4f", "network": {"id": "5416d54d-e8a3-4aa1-8443-b47ba7b911ae", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1066830962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0374f79fe6d4946a64c2acc369178cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb2c1db1-88", "ovs_interfaceid": "fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1344.083601] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:53:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '510d3c47-3615-43d5-aa5d-a279fd915e71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.091643] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Creating folder: Project (c0374f79fe6d4946a64c2acc369178cf). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1344.092307] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba4323d4-52df-43ca-9d31-8e4f2fa6f9f6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.095353] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.107167] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Created folder: Project (c0374f79fe6d4946a64c2acc369178cf) in parent group-v953072. [ 1344.107167] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Creating folder: Instances. Parent ref: group-v953149. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1344.107167] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b312b3c6-6e6b-413b-b807-c14443cce4f9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.109087] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9711e15c-ef00-47c4-afc3-b5fb93277c63 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.118298] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Created folder: Instances in parent group-v953149. [ 1344.118750] env[61570]: DEBUG oslo.service.loopingcall [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.119068] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1344.119428] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b643ad4-22b6-4d51-8c4f-0809601b9af3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.135441] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cb72b52-c3e2-46ea-8874-8832defc02ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1344.135928] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1344.136194] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '58', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '6', 'num_os_type_None': '10', 'num_proj_0ad35672443f4c9d97f0240cadfb986d': '1', 'io_workload': '10', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_eccce396bddd42cc931ff5c0e6850b49': '1', 'num_task_spawning': '4', 'num_proj_9697eba07488413b9b05222af1a8e33d': '1', 'num_proj_1453684a18b64487b99eedf9f842fd60': '1', 'num_proj_010349794e9d4aaf8248bc8855c58453': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1344.146025] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.146025] env[61570]: value = "task-4891380" [ 1344.146025] env[61570]: _type = "Task" [ 1344.146025] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.153657] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891380, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.369078] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "db38d263-aa3d-46b1-a13d-1469155fad84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.443110] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d0a2b2-bb4d-474a-a140-3fbb43f95f0c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.450026] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfde929-ae98-4239-a68a-5a861f26f9df {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.484220] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb640e08-50ce-46e4-b8a5-08151c92749c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.494902] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8be31ec-686f-4152-9657-91e6b1010a37 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.511713] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.520925] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1344.546756] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1344.546756] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.687s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.655211] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891380, 'name': CreateVM_Task, 'duration_secs': 0.406736} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.655491] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1344.656244] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.656458] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.656822] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1344.657188] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06fd0470-935c-464d-82f7-154545d84a47 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.663187] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 1344.663187] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]522743d2-580e-ebe8-eae3-d662605e211e" [ 1344.663187] env[61570]: _type = "Task" [ 1344.663187] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.671163] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]522743d2-580e-ebe8-eae3-d662605e211e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.176834] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.177107] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.177325] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.872874] env[61570]: DEBUG nova.compute.manager [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Received event network-changed-fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1345.873234] env[61570]: DEBUG nova.compute.manager [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Refreshing instance network info cache due to event network-changed-fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1345.873566] env[61570]: DEBUG oslo_concurrency.lockutils [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] Acquiring lock "refresh_cache-f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.873829] env[61570]: DEBUG oslo_concurrency.lockutils [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] Acquired lock "refresh_cache-f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.874139] env[61570]: DEBUG nova.network.neutron [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Refreshing network info cache for port fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1346.344180] env[61570]: DEBUG nova.network.neutron [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Updated VIF entry in instance network info cache for port fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1346.344180] env[61570]: DEBUG nova.network.neutron [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Updating instance_info_cache with network_info: [{"id": "fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca", "address": "fa:16:3e:30:53:4f", "network": {"id": "5416d54d-e8a3-4aa1-8443-b47ba7b911ae", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1066830962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0374f79fe6d4946a64c2acc369178cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb2c1db1-88", "ovs_interfaceid": "fb2c1db1-8847-4c89-9e4b-6ebb2ee153ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.355990] env[61570]: DEBUG oslo_concurrency.lockutils [req-7cbe32fd-a365-41ca-ac09-ea9d9d85c98d req-ad66c5e5-b17d-4f9b-a566-4f03d4ec9a21 service nova] Releasing lock "refresh_cache-f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.550071] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.754479] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.755641] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1346.755641] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1346.784574] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.784732] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.784831] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785013] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785086] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785207] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785327] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785471] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c69c8589-88e1-481e-87b8-55608322440c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785602] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785720] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1346.785838] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1346.786388] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.753461] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.752613] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.752714] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.367793] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.368112] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.697835] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.698138] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.748712] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.369781] 
env[61570]: DEBUG oslo_concurrency.lockutils [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "c69c8589-88e1-481e-87b8-55608322440c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.748111] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1362.200439] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "5f5232f8-60f4-472f-ab6e-6273904481e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.605031] env[61570]: WARNING oslo_vmware.rw_handles [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1385.605031] env[61570]: ERROR oslo_vmware.rw_handles [ 1385.605628] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1385.607260] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Caching image {{(pid=61570) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1385.607529] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Copying Virtual Disk [datastore2] vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/2f27c0dd-e7c7-4e92-a1e4-ecbd7429a1e1/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1385.607809] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8fe6016-07f5-4fda-ab08-d8bacf8e4136 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.616817] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 1385.616817] env[61570]: value = "task-4891381" [ 1385.616817] env[61570]: _type = "Task" [ 1385.616817] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.625339] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': task-4891381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.127745] env[61570]: DEBUG oslo_vmware.exceptions [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1386.128078] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.128640] env[61570]: ERROR nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1386.128640] env[61570]: Faults: ['InvalidArgument'] [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] yield resources [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.driver.spawn(context, instance, image_meta, [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._fetch_image_if_missing(context, vi) [ 1386.128640] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] image_cache(vi, tmp_image_ds_loc) [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] vm_util.copy_virtual_disk( [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] session._wait_for_task(vmdk_copy_task) [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.wait_for_task(task_ref) [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return evt.wait() [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] result = hub.switch() [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1386.129171] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.greenlet.switch() [ 1386.129488] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1386.129488] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.f(*self.args, **self.kw) [ 1386.129488] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1386.129488] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise exceptions.translate_fault(task_info.error) [ 1386.129488] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1386.129488] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Faults: ['InvalidArgument'] [ 1386.129488] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1386.129488] env[61570]: INFO nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Terminating instance [ 1386.130582] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.130809] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1386.130985] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3c678d4-6456-4975-8b75-c4b4185c5fcf {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.134043] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1386.134043] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1386.134304] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beea18aa-0454-4c56-807f-96762672c7ec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.141943] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1386.142217] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-238ba2d1-7cbb-4da3-8bcc-63069417b467 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.144428] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.144600] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1386.145551] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcc93a8e-eb9b-4e88-935e-56a30dd74249 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.150530] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 1386.150530] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ca72a4-3e39-3f95-e227-a248800ad46f" [ 1386.150530] env[61570]: _type = "Task" [ 1386.150530] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.159309] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ca72a4-3e39-3f95-e227-a248800ad46f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.385658] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1386.385889] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1386.386092] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Deleting the datastore file [datastore2] 0b77e196-4948-4a76-8e87-75e9b1e5df55 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1386.386381] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da4af15d-4766-4875-a7cf-9d2b2201ff73 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.394184] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for the task: (returnval){ [ 1386.394184] env[61570]: value = "task-4891383" [ 1386.394184] env[61570]: _type = "Task" [ 1386.394184] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.402601] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': task-4891383, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.661102] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1386.661374] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating directory with path [datastore2] vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1386.661695] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da43d742-562d-4a57-8cf8-39a051953a34 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.674491] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Created directory with path [datastore2] vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.674691] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Fetch image to [datastore2] vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1386.674860] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1386.675653] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7237d38-a413-421f-b470-2457319a74eb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.683009] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1f3050-81c6-49a2-8b46-a2b3e4b66d8f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.692774] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90469b4d-8147-4d0b-8675-8bdfb74b0254 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.724908] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5dac56-ceb5-4abe-9252-f97bd3849f85 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.731695] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-80ac7c29-0884-43f3-987f-5f070ae47c25 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.755119] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1386.814084] env[61570]: DEBUG oslo_vmware.rw_handles [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1386.873651] env[61570]: DEBUG oslo_vmware.rw_handles [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1386.873843] env[61570]: DEBUG oslo_vmware.rw_handles [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1386.904572] env[61570]: DEBUG oslo_vmware.api [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Task: {'id': task-4891383, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072345} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.904841] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.905036] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1386.905219] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1386.905387] env[61570]: INFO nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Took 0.77 seconds to destroy the instance on the hypervisor. [ 1386.910949] env[61570]: DEBUG nova.compute.claims [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1386.911158] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.911382] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.218791] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a472d3e8-ec91-4d4d-861e-2264462490c8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.226718] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f05a3f0-1eb2-4de6-9320-d11b178c76b5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.258138] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f977f4-fac0-40c8-a8c3-709077b1eaa0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.266465] env[61570]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1ab81f-6191-4427-ad9c-3e2e4f893454 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.279954] env[61570]: DEBUG nova.compute.provider_tree [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.288759] env[61570]: DEBUG nova.scheduler.client.report [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.304336] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.393s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.304874] env[61570]: ERROR nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1387.304874] env[61570]: Faults: ['InvalidArgument'] [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.driver.spawn(context, instance, image_meta, [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._fetch_image_if_missing(context, vi) [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] image_cache(vi, tmp_image_ds_loc) [ 1387.304874] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] vm_util.copy_virtual_disk( [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] session._wait_for_task(vmdk_copy_task) [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.wait_for_task(task_ref) [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return evt.wait() [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] result = hub.switch() [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.greenlet.switch() [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1387.305345] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.f(*self.args, **self.kw) [ 1387.305849] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1387.305849] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise exceptions.translate_fault(task_info.error) [ 1387.305849] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1387.305849] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Faults: ['InvalidArgument'] [ 1387.305849] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.305849] env[61570]: DEBUG nova.compute.utils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] VimFaultException {{(pid=61570) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1387.307145] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Build of instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 was re-scheduled: A specified parameter was not correct: fileType [ 1387.307145] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1387.307528] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1387.307696] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1387.307844] env[61570]: DEBUG nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1387.308047] env[61570]: DEBUG nova.network.neutron [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1387.413052] env[61570]: DEBUG neutronclient.v2_0.client [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1387.414183] env[61570]: ERROR nova.compute.manager [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.driver.spawn(context, instance, image_meta, [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._fetch_image_if_missing(context, vi) [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] image_cache(vi, tmp_image_ds_loc) [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1387.414183] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] vm_util.copy_virtual_disk( [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] session._wait_for_task(vmdk_copy_task) [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.wait_for_task(task_ref) [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return evt.wait() [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] result = hub.switch() [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.greenlet.switch() [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.f(*self.args, **self.kw) [ 1387.414523] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise exceptions.translate_fault(task_info.error) [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Faults: ['InvalidArgument'] [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] During handling of the above exception, another exception occurred: [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._build_and_run_instance(context, instance, image, [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise exception.RescheduledException( [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] nova.exception.RescheduledException: Build of instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 was re-scheduled: A specified parameter was not correct: fileType [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Faults: ['InvalidArgument'] [ 1387.414859] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] During handling of the above exception, another exception occurred: [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 
1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] exception_handler_v20(status_code, error_body) [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise client_exc(message=error_message, [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Neutron server returns request_ids: ['req-215f3ced-14ea-46e7-9c10-7cf38873574c'] [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.415249] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] During handling of the above exception, another exception occurred: [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._deallocate_network(context, instance, requested_networks) [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.network_api.deallocate_for_instance( [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] data = neutron.list_ports(**search_opts) [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.list('ports', self.ports_path, retrieve_all, [ 1387.415595] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 
0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] for r in self._pagination(collection, path, **params): [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] res = self.get(path, params=params) [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.retry_request("GET", action, body=body, [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1387.415945] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.do_request(method, action, body=body, [ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._handle_fault_response(status_code, replybody, resp) [ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise exception.Unauthorized() [ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] nova.exception.Unauthorized: Not authorized. 
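The nested tracebacks above show the pattern at the Nova/Neutron boundary: the raw neutronclient Unauthorized (HTTP 401) is caught by the wrapper in nova/network/neutron.py and re-raised as a Nova-level exception, which is why two chained tracebacks appear for a single failure. A minimal, self-contained sketch of that translation pattern follows; the class and function names are hypothetical stand-ins, not Nova's actual implementation.

    # Sketch of "translate client exceptions at the service boundary".
    # ClientUnauthorized, NovaUnauthorized and translate_client_errors are
    # hypothetical stand-ins for the neutronclient / nova.exception classes.
    import functools


    class ClientUnauthorized(Exception):
        """Stands in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""


    class NovaUnauthorized(Exception):
        """Stands in for nova.exception.Unauthorized ('Not authorized.')."""


    def translate_client_errors(func):
        """Re-raise low-level client errors as service-level exceptions."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except ClientUnauthorized as exc:
                # Keep the original as the cause so both tracebacks show up,
                # like the "During handling of the above exception" chain above.
                raise NovaUnauthorized("Not authorized.") from exc
        return wrapper


    @translate_client_errors
    def list_ports(**search_opts):
        # A real call would hit the Neutron API; here we just simulate the 401.
        raise ClientUnauthorized("401: The request you have made requires authentication.")


    if __name__ == "__main__":
        try:
            list_ports(device_id="0b77e196-4948-4a76-8e87-75e9b1e5df55")
        except NovaUnauthorized as exc:
            print(type(exc).__name__, "caused by", type(exc.__cause__).__name__)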
[ 1387.416313] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.476554] env[61570]: INFO nova.scheduler.client.report [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Deleted allocations for instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 [ 1387.500189] env[61570]: DEBUG oslo_concurrency.lockutils [None req-39f13e69-1dda-4782-a60d-8752bc5aa18b tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.281s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.501415] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.780s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.501629] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Acquiring lock "0b77e196-4948-4a76-8e87-75e9b1e5df55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.501827] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.502096] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.504159] env[61570]: INFO nova.compute.manager [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Terminating instance [ 1387.505824] env[61570]: DEBUG nova.compute.manager [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1387.506037] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1387.506506] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-632ce2c7-8a87-4cbd-b956-a6b4c4bce949 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.515969] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382941b6-3d99-4739-ac79-8389ebd82aec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.527798] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1387.556686] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b77e196-4948-4a76-8e87-75e9b1e5df55 could not be found. [ 1387.556985] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1387.557247] env[61570]: INFO nova.compute.manager [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1387.557560] env[61570]: DEBUG oslo.service.loopingcall [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.557892] env[61570]: DEBUG nova.compute.manager [-] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1387.558156] env[61570]: DEBUG nova.network.neutron [-] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1387.597033] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.597440] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.599727] env[61570]: INFO nova.compute.claims [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1387.662180] env[61570]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1387.662453] env[61570]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
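The failed "Dynamic interval looping call" above is the retry wrapper that the compute manager places around network deallocation (_deallocate_network_with_retries). A persistent 401 is not a transient error, so the retries are exhausted and the final exception escapes. A stand-alone sketch of that bounded-retry idea in plain Python; this is not the oslo_service.loopingcall.RetryDecorator itself, and the retry count and sleep values are illustrative.

    # Sketch of a bounded-retry decorator in the spirit of
    # oslo_service.loopingcall.RetryDecorator; names and limits are illustrative.
    import functools
    import time


    def retry_on(exceptions, max_retries=3, sleep_seconds=2.0):
        """Retry the wrapped call on the given exceptions, then re-raise."""
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                attempt = 0
                while True:
                    try:
                        return func(*args, **kwargs)
                    except exceptions:
                        attempt += 1
                        if attempt > max_retries:
                            # Out of retries: the terminal failure propagates,
                            # which is what produces the ERROR records above.
                            raise
                        time.sleep(sleep_seconds)
            return wrapper
        return decorator


    class TransientNetworkError(Exception):
        """Hypothetical stand-in for a retriable deallocation failure."""


    @retry_on((TransientNetworkError,), max_retries=2, sleep_seconds=0.1)
    def deallocate_network(instance_uuid):
        # A real implementation would call the Neutron API; a persistent 401
        # (as in the log) never clears, so the caller still sees the failure.
        raise TransientNetworkError(instance_uuid)


    if __name__ == "__main__":
        try:
            deallocate_network("0b77e196-4948-4a76-8e87-75e9b1e5df55")
        except TransientNetworkError as exc:
            print("gave up after retries:", exc)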
[ 1387.662782] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-920ee1f9-3d81-4529-bc7d-10573b4f59f7'] [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1387.662782] env[61570]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1387.663196] env[61570]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1387.663196] env[61570]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1387.663589] env[61570]: ERROR oslo.service.loopingcall [ 1387.663968] env[61570]: ERROR nova.compute.manager [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1387.697298] env[61570]: ERROR nova.compute.manager [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
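The traceback that follows passes repeatedly through oslo_utils.excutils.save_and_reraise_exception, which lets cleanup run (here, reverting the task state) and then re-raises the original exception so callers still see the real failure. A minimal sketch using that context manager; the cleanup body is hypothetical.

    # Minimal use of oslo_utils.excutils.save_and_reraise_exception: capture the
    # in-flight exception, run cleanup, then re-raise the original unchanged.
    from oslo_utils import excutils


    def terminate_with_cleanup():
        try:
            raise RuntimeError("deallocation failed")  # stand-in for the Neutron 401 path
        except RuntimeError:
            with excutils.save_and_reraise_exception():
                # Hypothetical cleanup step; when this block exits cleanly the
                # saved RuntimeError is re-raised, mirroring the log's
                # "Successfully reverted task state ... on failure" record.
                print("reverting task state before re-raising")


    if __name__ == "__main__":
        try:
            terminate_with_cleanup()
        except RuntimeError as exc:
            print("caller sees the original error:", exc)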
[ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] exception_handler_v20(status_code, error_body) [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise client_exc(message=error_message, [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Neutron server returns request_ids: ['req-920ee1f9-3d81-4529-bc7d-10573b4f59f7'] [ 1387.697298] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] During handling of the above exception, another exception occurred: [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Traceback (most recent call last): [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._delete_instance(context, instance, bdms) [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._shutdown_instance(context, instance, bdms) [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._try_deallocate_network(context, instance, requested_networks) [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] with excutils.save_and_reraise_exception(): [ 1387.697683] env[61570]: ERROR 
nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1387.697683] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.force_reraise() [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise self.value [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] _deallocate_network_with_retries() [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return evt.wait() [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] result = hub.switch() [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.greenlet.switch() [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1387.698085] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] result = func(*self.args, **self.kw) [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] result = f(*args, **kwargs) [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._deallocate_network( [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self.network_api.deallocate_for_instance( [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 
0b77e196-4948-4a76-8e87-75e9b1e5df55] data = neutron.list_ports(**search_opts) [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.list('ports', self.ports_path, retrieve_all, [ 1387.698356] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] for r in self._pagination(collection, path, **params): [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] res = self.get(path, params=params) [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.retry_request("GET", action, body=body, [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1387.698682] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] return self.do_request(method, action, body=body, [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] ret = obj(*args, **kwargs) [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] self._handle_fault_response(status_code, replybody, resp) [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1387.699075] env[61570]: ERROR nova.compute.manager [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] [ 1387.730033] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.228s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.731251] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 266.518s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.731251] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] During sync_power_state the instance has a pending task (deleting). Skip. [ 1387.731436] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "0b77e196-4948-4a76-8e87-75e9b1e5df55" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.789401] env[61570]: INFO nova.compute.manager [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] [instance: 0b77e196-4948-4a76-8e87-75e9b1e5df55] Successfully reverted task state from None on failure for instance. [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server [None req-a5c2adda-a1fe-45af-b4cc-33f906b5bc6c tempest-ListImageFiltersTestJSON-1169271434 tempest-ListImageFiltersTestJSON-1169271434-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-920ee1f9-3d81-4529-bc7d-10573b4f59f7'] [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1387.793266] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1387.793660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1387.794222] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1387.794660] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1387.795102] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.795561] env[61570]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1387.795561] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1387.796050] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1387.796050] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1387.796050] env[61570]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1387.796050] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1387.796050] env[61570]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1387.796050] env[61570]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
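A few records below, the scheduler report client logs unchanged inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113. The usable capacity Placement derives from such an inventory is (total - reserved) * allocation_ratio per resource class, which for the logged values works out to 192 VCPU, 196078 MB of RAM and 200 GB of disk:

    # Capacity derived from the inventory logged below:
    # capacity = (total - reserved) * allocation_ratio for each resource class.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 200, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0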
[ 1387.796050] env[61570]: ERROR oslo_messaging.rpc.server [ 1387.916759] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ceb3207-5592-4a0c-bd68-9dd9c14f50cb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.925652] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5b0c6a-1b57-4749-9d41-da8514c92355 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.959155] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf002c4f-3cb0-46b0-b669-b82eb104135a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.967347] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bec29cc-dd9f-4584-ac81-db65b1a5b17a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.981382] env[61570]: DEBUG nova.compute.provider_tree [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.992731] env[61570]: DEBUG nova.scheduler.client.report [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1388.011479] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.414s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.012036] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1388.052276] env[61570]: DEBUG nova.compute.utils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1388.053546] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1388.054536] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1388.066038] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1388.134699] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1388.147095] env[61570]: DEBUG nova.policy [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a7a7f44b98e4eacb56d11d43dc3cad4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '743eeefd02e04e63850742fc5590125f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1388.160910] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1388.161164] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1388.161318] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1388.161498] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1388.161650] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1388.161796] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1388.161995] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1388.162166] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1388.162329] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1388.162564] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1388.162653] env[61570]: DEBUG nova.virt.hardware [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1388.163537] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c895f24-eb47-4b8e-964a-235cff628158 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1388.173850] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c81d69-8a7d-4dc5-9111-3533c04cd0b8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.585678] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Successfully created port: ad7a21d1-9d87-4370-96a2-f5488fb3ce6d {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1388.724603] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.510245] env[61570]: DEBUG nova.compute.manager [req-ea528b5d-37a6-4a3d-9bc9-f66b34bbf4a9 req-9ae38256-f5ec-4359-bec3-5957faa43f50 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Received event network-vif-plugged-ad7a21d1-9d87-4370-96a2-f5488fb3ce6d {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1389.510465] env[61570]: DEBUG oslo_concurrency.lockutils [req-ea528b5d-37a6-4a3d-9bc9-f66b34bbf4a9 req-9ae38256-f5ec-4359-bec3-5957faa43f50 service nova] Acquiring lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.510671] env[61570]: DEBUG oslo_concurrency.lockutils [req-ea528b5d-37a6-4a3d-9bc9-f66b34bbf4a9 req-9ae38256-f5ec-4359-bec3-5957faa43f50 service nova] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.510834] env[61570]: DEBUG oslo_concurrency.lockutils [req-ea528b5d-37a6-4a3d-9bc9-f66b34bbf4a9 req-9ae38256-f5ec-4359-bec3-5957faa43f50 service nova] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.510996] env[61570]: DEBUG nova.compute.manager [req-ea528b5d-37a6-4a3d-9bc9-f66b34bbf4a9 req-9ae38256-f5ec-4359-bec3-5957faa43f50 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] No waiting events found dispatching network-vif-plugged-ad7a21d1-9d87-4370-96a2-f5488fb3ce6d {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1389.511891] env[61570]: WARNING nova.compute.manager [req-ea528b5d-37a6-4a3d-9bc9-f66b34bbf4a9 req-9ae38256-f5ec-4359-bec3-5957faa43f50 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Received unexpected event network-vif-plugged-ad7a21d1-9d87-4370-96a2-f5488fb3ce6d for instance with vm_state building and task_state spawning. 
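The network-vif-plugged records above show the external-event handshake: the compute side registers the events it expects before plugging the VIF, Neutron's notification pops the matching waiter, and an event with no registered waiter is logged as unexpected (the instance here was still building). A self-contained sketch of that latch pattern; the names are hypothetical and this is not Nova's InstanceEvents implementation.

    # Sketch of an "expected event" latch: a waiter is registered before the
    # action that triggers the event; the notification side pops and signals it.
    import threading


    class EventLatch:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # event name -> threading.Event

        def expect(self, name):
            """Register interest in an event before triggering it."""
            with self._lock:
                ev = threading.Event()
                self._waiters[name] = ev
                return ev

        def deliver(self, name):
            """Called by the notification side (e.g. on network-vif-plugged)."""
            with self._lock:
                ev = self._waiters.pop(name, None)
            if ev is None:
                print("Received unexpected event", name)  # mirrors the WARNING above
                return
            ev.set()


    if __name__ == "__main__":
        latch = EventLatch()
        waiter = latch.expect("network-vif-plugged-ad7a21d1-9d87-4370-96a2-f5488fb3ce6d")
        latch.deliver("network-vif-plugged-ad7a21d1-9d87-4370-96a2-f5488fb3ce6d")
        print("event seen:", waiter.wait(timeout=1.0))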
[ 1389.619142] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Successfully updated port: ad7a21d1-9d87-4370-96a2-f5488fb3ce6d {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1389.634331] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "refresh_cache-70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.634589] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "refresh_cache-70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.634829] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1389.690203] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1389.894330] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Updating instance_info_cache with network_info: [{"id": "ad7a21d1-9d87-4370-96a2-f5488fb3ce6d", "address": "fa:16:3e:20:4e:c4", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad7a21d1-9d", "ovs_interfaceid": "ad7a21d1-9d87-4370-96a2-f5488fb3ce6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.906430] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 
tempest-ServersTestJSON-516366677-project-member] Releasing lock "refresh_cache-70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.907061] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Instance network_info: |[{"id": "ad7a21d1-9d87-4370-96a2-f5488fb3ce6d", "address": "fa:16:3e:20:4e:c4", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad7a21d1-9d", "ovs_interfaceid": "ad7a21d1-9d87-4370-96a2-f5488fb3ce6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1389.907206] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:4e:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad7a21d1-9d87-4370-96a2-f5488fb3ce6d', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1389.915267] env[61570]: DEBUG oslo.service.loopingcall [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1389.915884] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1389.916169] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8addf573-1d98-4368-b10b-bb665e4cf776 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.937536] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1389.937536] env[61570]: value = "task-4891384" [ 1389.937536] env[61570]: _type = "Task" [ 1389.937536] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.947487] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891384, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.448338] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891384, 'name': CreateVM_Task, 'duration_secs': 0.306261} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.448502] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1390.449221] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.449401] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.449708] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1390.449954] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7848202-999e-4366-943b-9df68de2fa06 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.455552] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 1390.455552] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5292ad25-f0e5-645f-5757-96a73b278643" [ 1390.455552] env[61570]: _type = "Task" [ 1390.455552] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.464513] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5292ad25-f0e5-645f-5757-96a73b278643, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.508060] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.966833] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.967122] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.967339] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.545540] env[61570]: DEBUG nova.compute.manager [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Received event network-changed-ad7a21d1-9d87-4370-96a2-f5488fb3ce6d {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1391.545769] env[61570]: DEBUG nova.compute.manager [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Refreshing instance network info cache due to event network-changed-ad7a21d1-9d87-4370-96a2-f5488fb3ce6d. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1391.546025] env[61570]: DEBUG oslo_concurrency.lockutils [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] Acquiring lock "refresh_cache-70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.546162] env[61570]: DEBUG oslo_concurrency.lockutils [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] Acquired lock "refresh_cache-70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.546314] env[61570]: DEBUG nova.network.neutron [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Refreshing network info cache for port ad7a21d1-9d87-4370-96a2-f5488fb3ce6d {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1391.922831] env[61570]: DEBUG nova.network.neutron [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Updated VIF entry in instance network info cache for port ad7a21d1-9d87-4370-96a2-f5488fb3ce6d. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1391.923203] env[61570]: DEBUG nova.network.neutron [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Updating instance_info_cache with network_info: [{"id": "ad7a21d1-9d87-4370-96a2-f5488fb3ce6d", "address": "fa:16:3e:20:4e:c4", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad7a21d1-9d", "ovs_interfaceid": "ad7a21d1-9d87-4370-96a2-f5488fb3ce6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.933627] env[61570]: DEBUG oslo_concurrency.lockutils [req-93a250d6-1040-4d81-9055-5f3803b5a654 req-48bf7208-d04e-4735-9882-5a72cec6a1c4 service nova] Releasing lock "refresh_cache-70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.754744] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.754744] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1403.753300] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1403.769425] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.769425] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.769425] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.769425] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1403.770301] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b00c02-342d-4cbd-8b94-b36b84789e56 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.779702] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d974d46-0572-4f26-b44e-3f452017e0c5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.794398] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb93231-bda6-4a55-995b-c123cb2ec5bc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.801449] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec2b1d1-3da5-4d76-b00f-0941374fd770 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.831910] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180580MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1403.832079] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.832293] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.993066] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.993240] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.993369] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.993491] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.993610] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.993726] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.993842] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.993956] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.994092] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1403.994247] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1404.010744] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ea7cddf9-4529-4716-bc4e-8490e3f5ef83 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.022990] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 3dec2bfa-16bf-423d-9ce3-16da3d9e9397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.035015] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 23632a70-aec8-44aa-aa56-8ebe2b91840e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.047318] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.060802] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9711e15c-ef00-47c4-afc3-b5fb93277c63 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.074609] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cb72b52-c3e2-46ea-8874-8832defc02ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.088472] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.099337] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.112680] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1404.112680] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1404.112894] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '59', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_743eeefd02e04e63850742fc5590125f': '2', 'io_workload': '10', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_eccce396bddd42cc931ff5c0e6850b49': '1', 'num_proj_9697eba07488413b9b05222af1a8e33d': '1', 'num_proj_1453684a18b64487b99eedf9f842fd60': '1', 'num_proj_010349794e9d4aaf8248bc8855c58453': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1404.135711] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.135957] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.363617] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ee636a-6b10-405e-8442-c949ef7c00b9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.371779] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9444ae50-55f9-4506-b2fc-1e35ea6b0e35 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.401327] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6113a092-7106-465d-8305-4890b5924928 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.409143] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b730f48f-5f0d-4ed4-815f-548a3c05b5d6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.422690] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for 
provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1404.432781] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1404.449017] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1404.449260] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.617s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.752715] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1404.752884] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1404.762650] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] There are 0 instances to clean {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1405.763645] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.753632] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.753980] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1407.753980] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1407.776813] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.777033] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.777220] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.777400] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.777558] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.777711] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.777843] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c69c8589-88e1-481e-87b8-55608322440c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.777962] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.778101] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.778251] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1407.778373] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1408.752839] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.753133] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.456856] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.457262] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.752910] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.753162] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.748698] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.753973] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.761642] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.761975] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances with incomplete migration {{(pid=61570) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1434.759743] env[61570]: WARNING oslo_vmware.rw_handles [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 
tempest-ServersTestJSON-516366677-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1434.759743] env[61570]: ERROR oslo_vmware.rw_handles [ 1434.760413] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1434.762105] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1434.762371] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Copying Virtual Disk [datastore2] vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/1084fb82-8d60-4c75-82a7-25dfe94897a2/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1434.763050] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebac9fef-c7ef-4c7e-bf5e-9172f9b9960c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.771925] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 1434.771925] env[61570]: value = "task-4891385" [ 1434.771925] env[61570]: _type = "Task" [ 1434.771925] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.781013] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': task-4891385, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.282604] env[61570]: DEBUG oslo_vmware.exceptions [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1435.282878] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.283438] env[61570]: ERROR nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1435.283438] env[61570]: Faults: ['InvalidArgument'] [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Traceback (most recent call last): [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] yield resources [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] self.driver.spawn(context, instance, image_meta, [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] self._fetch_image_if_missing(context, vi) [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1435.283438] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] image_cache(vi, tmp_image_ds_loc) [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] vm_util.copy_virtual_disk( [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] session._wait_for_task(vmdk_copy_task) [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] return self.wait_for_task(task_ref) [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] return evt.wait() [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] result = hub.switch() [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] return self.greenlet.switch() [ 1435.283774] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1435.284100] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] self.f(*self.args, **self.kw) [ 1435.284100] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1435.284100] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] raise exceptions.translate_fault(task_info.error) [ 1435.284100] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1435.284100] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Faults: ['InvalidArgument'] [ 1435.284100] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] [ 1435.284100] env[61570]: INFO nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Terminating instance [ 1435.285428] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.285533] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1435.285760] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d4210dd-01a3-47e3-b88c-23ac2e935e48 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.288268] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1435.288481] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1435.289220] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f930c8-9055-439c-be0d-2caafb3670e9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.296179] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1435.296427] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-376d981d-cd4c-4f2d-9238-6eb8c67c22ef {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.298769] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1435.298944] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1435.300045] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3515acd-4c9f-4e26-a235-4978d3d12a61 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.305921] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 1435.305921] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f22904-885e-536a-706a-c65c29fbf474" [ 1435.305921] env[61570]: _type = "Task" [ 1435.305921] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.316987] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f22904-885e-536a-706a-c65c29fbf474, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.375884] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1435.376148] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1435.376338] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Deleting the datastore file [datastore2] 8ef1d751-e809-46e0-b98f-ac90ab076889 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1435.376628] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b3755c4-5f11-4ef6-b2f7-b5f28c232ed6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.384090] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 1435.384090] env[61570]: value = "task-4891387" [ 1435.384090] env[61570]: _type = "Task" [ 1435.384090] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.393894] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': task-4891387, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.817047] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1435.817353] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating directory with path [datastore2] vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1435.817543] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2538ac4a-b961-49b3-a997-d6598546d373 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.830019] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Created directory with path [datastore2] vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1435.830231] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Fetch image to [datastore2] vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1435.830416] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1435.831201] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c40572-8e3c-49eb-8c2f-945b502b6011 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.838475] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98464862-7aca-452f-92f5-d195d2d5f879 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.848239] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d144175b-56ad-4f51-bb34-abcf89f30264 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.878606] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-746eed79-6f4a-41d1-a517-d2149018b1cc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.887923] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d664c5e0-7f22-47a2-926d-e5f2f2c77d80 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.894540] env[61570]: DEBUG oslo_vmware.api [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': task-4891387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065851} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.894815] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1435.895015] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1435.895194] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1435.895366] env[61570]: INFO nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1435.897586] env[61570]: DEBUG nova.compute.claims [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1435.897758] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.897966] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.914323] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1435.967860] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1436.028292] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1436.028510] env[61570]: DEBUG oslo_vmware.rw_handles [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1436.218365] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8cecbb-418a-4f84-8698-6c8f4a2ee063 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.226453] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55267588-e31e-4123-977d-2596bf6066ea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.258018] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c1121e-aa0a-4af2-9b9a-866239662fbc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.266884] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e579229-a419-4a78-8330-3fa08777d07f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.282098] env[61570]: DEBUG nova.compute.provider_tree [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.291063] env[61570]: DEBUG nova.scheduler.client.report [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1436.306457] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.408s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.306796] env[61570]: ERROR nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1436.306796] env[61570]: Faults: ['InvalidArgument'] [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Traceback (most recent call last): [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 
8ef1d751-e809-46e0-b98f-ac90ab076889] self.driver.spawn(context, instance, image_meta, [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] self._fetch_image_if_missing(context, vi) [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] image_cache(vi, tmp_image_ds_loc) [ 1436.306796] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] vm_util.copy_virtual_disk( [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] session._wait_for_task(vmdk_copy_task) [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] return self.wait_for_task(task_ref) [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] return evt.wait() [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] result = hub.switch() [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] return self.greenlet.switch() [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1436.307121] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] self.f(*self.args, **self.kw) [ 1436.307574] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1436.307574] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] raise exceptions.translate_fault(task_info.error) [ 1436.307574] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1436.307574] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Faults: ['InvalidArgument'] [ 1436.307574] env[61570]: ERROR nova.compute.manager [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] [ 1436.307574] env[61570]: DEBUG nova.compute.utils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1436.309099] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Build of instance 8ef1d751-e809-46e0-b98f-ac90ab076889 was re-scheduled: A specified parameter was not correct: fileType [ 1436.309099] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1436.309496] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1436.309691] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1436.309866] env[61570]: DEBUG nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1436.310041] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1436.602819] env[61570]: DEBUG nova.network.neutron [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.615034] env[61570]: INFO nova.compute.manager [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Took 0.30 seconds to deallocate network for instance. [ 1436.714555] env[61570]: INFO nova.scheduler.client.report [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Deleted allocations for instance 8ef1d751-e809-46e0-b98f-ac90ab076889 [ 1436.738184] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d07a22de-7ee7-4ed7-9ecc-859601c9d1b5 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.158s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.739579] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.922s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.739746] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "8ef1d751-e809-46e0-b98f-ac90ab076889-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.739960] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.740144] env[61570]: 
DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.745025] env[61570]: INFO nova.compute.manager [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Terminating instance [ 1436.746795] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.746957] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.747140] env[61570]: DEBUG nova.network.neutron [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1436.755534] env[61570]: DEBUG nova.compute.manager [None req-191b5b41-9199-425c-8347-62df22c71112 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 8fa50d72-12ee-4345-9ad7-07896fe34776] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1436.786406] env[61570]: DEBUG nova.network.neutron [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1436.793561] env[61570]: DEBUG nova.compute.manager [None req-191b5b41-9199-425c-8347-62df22c71112 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 8fa50d72-12ee-4345-9ad7-07896fe34776] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1436.818302] env[61570]: DEBUG oslo_concurrency.lockutils [None req-191b5b41-9199-425c-8347-62df22c71112 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "8fa50d72-12ee-4345-9ad7-07896fe34776" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.568s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.828412] env[61570]: DEBUG nova.compute.manager [None req-1d9a1c27-6250-4d1c-806e-dd57bd45d341 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: ea7cddf9-4529-4716-bc4e-8490e3f5ef83] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1436.856798] env[61570]: DEBUG nova.compute.manager [None req-1d9a1c27-6250-4d1c-806e-dd57bd45d341 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: ea7cddf9-4529-4716-bc4e-8490e3f5ef83] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1436.885472] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1d9a1c27-6250-4d1c-806e-dd57bd45d341 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "ea7cddf9-4529-4716-bc4e-8490e3f5ef83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.086s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.897398] env[61570]: DEBUG nova.compute.manager [None req-c0100984-04e9-4b0c-803d-fcd42a8d8322 tempest-ServersTestJSON-1682873444 tempest-ServersTestJSON-1682873444-project-member] [instance: 3dec2bfa-16bf-423d-9ce3-16da3d9e9397] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1436.924016] env[61570]: DEBUG nova.compute.manager [None req-c0100984-04e9-4b0c-803d-fcd42a8d8322 tempest-ServersTestJSON-1682873444 tempest-ServersTestJSON-1682873444-project-member] [instance: 3dec2bfa-16bf-423d-9ce3-16da3d9e9397] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1436.938912] env[61570]: DEBUG nova.network.neutron [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.948216] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "refresh_cache-8ef1d751-e809-46e0-b98f-ac90ab076889" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.949149] env[61570]: DEBUG nova.compute.manager [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1436.949149] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1436.949528] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-946150ac-538e-44d3-b593-f23740994afa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.953917] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c0100984-04e9-4b0c-803d-fcd42a8d8322 tempest-ServersTestJSON-1682873444 tempest-ServersTestJSON-1682873444-project-member] Lock "3dec2bfa-16bf-423d-9ce3-16da3d9e9397" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.222s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.960860] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761ee4cc-4c96-4819-8a84-2b6df12d43ce {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.973173] env[61570]: DEBUG nova.compute.manager [None req-0019c138-f0bd-487b-a08d-4f60f24a9674 tempest-ServerShowV257Test-1481275900 tempest-ServerShowV257Test-1481275900-project-member] [instance: 23632a70-aec8-44aa-aa56-8ebe2b91840e] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1436.997551] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ef1d751-e809-46e0-b98f-ac90ab076889 could not be found. [ 1436.997751] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1436.997934] env[61570]: INFO nova.compute.manager [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1436.998202] env[61570]: DEBUG oslo.service.loopingcall [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1436.998491] env[61570]: DEBUG nova.compute.manager [-] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1436.998585] env[61570]: DEBUG nova.network.neutron [-] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1437.001181] env[61570]: DEBUG nova.compute.manager [None req-0019c138-f0bd-487b-a08d-4f60f24a9674 tempest-ServerShowV257Test-1481275900 tempest-ServerShowV257Test-1481275900-project-member] [instance: 23632a70-aec8-44aa-aa56-8ebe2b91840e] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1437.019655] env[61570]: DEBUG nova.network.neutron [-] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1437.024138] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0019c138-f0bd-487b-a08d-4f60f24a9674 tempest-ServerShowV257Test-1481275900 tempest-ServerShowV257Test-1481275900-project-member] Lock "23632a70-aec8-44aa-aa56-8ebe2b91840e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.172s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.028317] env[61570]: DEBUG nova.network.neutron [-] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.034323] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1437.037936] env[61570]: INFO nova.compute.manager [-] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] Took 0.04 seconds to deallocate network for instance. 
[ 1437.102098] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.102534] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.104714] env[61570]: INFO nova.compute.claims [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1437.145875] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3112c27d-aed5-465e-907c-08bea958d4c4 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.406s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.147297] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 315.934s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.147630] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8ef1d751-e809-46e0-b98f-ac90ab076889] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1437.147923] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "8ef1d751-e809-46e0-b98f-ac90ab076889" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.394382] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b2db85-46b5-4e74-ad13-e10ddabeb785 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.403043] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f914e00-225f-49ae-9af1-8f1b5b7803f3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.436518] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01aed29-04a8-4dec-83dc-cdb31312e3de {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.445191] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d38d17-7b97-401a-8bb2-e464c32f4637 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.459742] env[61570]: DEBUG nova.compute.provider_tree [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1437.469793] env[61570]: DEBUG nova.scheduler.client.report [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1437.489924] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.387s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.490459] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1437.527764] env[61570]: DEBUG nova.compute.utils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1437.529928] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1437.530243] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1437.554860] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1437.629025] env[61570]: DEBUG nova.policy [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ee9cedabee24030bd9583b9e6ff7f07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e239f92d55742a7bf8a5bbc33ca718b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1437.697877] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1437.726416] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1437.726675] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1437.726837] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1437.727041] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1437.727201] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1437.727351] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1437.727561] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1437.727781] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1437.727889] env[61570]: DEBUG nova.virt.hardware [None 
req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1437.728072] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1437.728266] env[61570]: DEBUG nova.virt.hardware [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1437.729251] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda9a86c-70d1-4997-b9ca-a68ef177ee9e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.738296] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6815ef-bd3c-4ead-8b1c-7db4d45f3e67 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.991027] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Successfully created port: cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1438.684654] env[61570]: DEBUG nova.compute.manager [req-3c4de018-26c5-4a6e-9073-039c1b1b9a06 req-f2351d64-da7a-4f8a-a1e5-cc809b759d43 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Received event network-vif-plugged-cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1438.684900] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c4de018-26c5-4a6e-9073-039c1b1b9a06 req-f2351d64-da7a-4f8a-a1e5-cc809b759d43 service nova] Acquiring lock "04741959-c2c4-4b38-92e7-43f941818775-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.685121] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c4de018-26c5-4a6e-9073-039c1b1b9a06 req-f2351d64-da7a-4f8a-a1e5-cc809b759d43 service nova] Lock "04741959-c2c4-4b38-92e7-43f941818775-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.685295] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c4de018-26c5-4a6e-9073-039c1b1b9a06 req-f2351d64-da7a-4f8a-a1e5-cc809b759d43 service nova] Lock "04741959-c2c4-4b38-92e7-43f941818775-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.685458] env[61570]: DEBUG nova.compute.manager 
[req-3c4de018-26c5-4a6e-9073-039c1b1b9a06 req-f2351d64-da7a-4f8a-a1e5-cc809b759d43 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] No waiting events found dispatching network-vif-plugged-cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1438.685622] env[61570]: WARNING nova.compute.manager [req-3c4de018-26c5-4a6e-9073-039c1b1b9a06 req-f2351d64-da7a-4f8a-a1e5-cc809b759d43 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Received unexpected event network-vif-plugged-cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b for instance with vm_state building and task_state spawning. [ 1438.819708] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Successfully updated port: cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1438.835180] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "refresh_cache-04741959-c2c4-4b38-92e7-43f941818775" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.835359] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired lock "refresh_cache-04741959-c2c4-4b38-92e7-43f941818775" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.835481] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1439.083625] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1439.267904] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Updating instance_info_cache with network_info: [{"id": "cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b", "address": "fa:16:3e:d2:1f:1e", "network": {"id": "d6fc49c7-f0f7-4d44-b690-ac9b566a7279", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2044663182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e239f92d55742a7bf8a5bbc33ca718b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdeb5873-5c", "ovs_interfaceid": "cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.285978] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Releasing lock "refresh_cache-04741959-c2c4-4b38-92e7-43f941818775" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.286315] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Instance network_info: |[{"id": "cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b", "address": "fa:16:3e:d2:1f:1e", "network": {"id": "d6fc49c7-f0f7-4d44-b690-ac9b566a7279", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2044663182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e239f92d55742a7bf8a5bbc33ca718b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdeb5873-5c", "ovs_interfaceid": "cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 
1439.287095] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:1f:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1439.294753] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating folder: Project (1e239f92d55742a7bf8a5bbc33ca718b). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1439.295391] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-293dc689-ca65-4996-b6aa-09ca0dce2513 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.307488] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Created folder: Project (1e239f92d55742a7bf8a5bbc33ca718b) in parent group-v953072. [ 1439.307740] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating folder: Instances. Parent ref: group-v953153. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1439.308342] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-432b8b22-f7b9-4ef3-8155-dca83b6e502a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.319791] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Created folder: Instances in parent group-v953153. [ 1439.324025] env[61570]: DEBUG oslo.service.loopingcall [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1439.324025] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1439.324025] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e16ccf4b-6632-4a49-aa23-271cf62044e2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.341546] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1439.341546] env[61570]: value = "task-4891390" [ 1439.341546] env[61570]: _type = "Task" [ 1439.341546] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.350148] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891390, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.852449] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891390, 'name': CreateVM_Task, 'duration_secs': 0.309228} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.852643] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1439.853281] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.853441] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.853757] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1439.854015] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cac6f83f-6a98-4d7f-869b-eaacafe6df2f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.858975] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 1439.858975] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ef4163-380b-c77f-64a0-d66dc844b850" [ 1439.858975] env[61570]: _type = "Task" [ 1439.858975] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.868523] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ef4163-380b-c77f-64a0-d66dc844b850, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.374547] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.374985] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1440.375291] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.715774] env[61570]: DEBUG nova.compute.manager [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Received event network-changed-cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1440.715973] env[61570]: DEBUG nova.compute.manager [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Refreshing instance network info cache due to event network-changed-cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1440.716199] env[61570]: DEBUG oslo_concurrency.lockutils [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] Acquiring lock "refresh_cache-04741959-c2c4-4b38-92e7-43f941818775" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.716340] env[61570]: DEBUG oslo_concurrency.lockutils [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] Acquired lock "refresh_cache-04741959-c2c4-4b38-92e7-43f941818775" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.716596] env[61570]: DEBUG nova.network.neutron [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Refreshing network info cache for port cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1441.009683] env[61570]: DEBUG nova.network.neutron [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Updated VIF entry in instance network info cache for port cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1441.010078] env[61570]: DEBUG nova.network.neutron [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Updating instance_info_cache with network_info: [{"id": "cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b", "address": "fa:16:3e:d2:1f:1e", "network": {"id": "d6fc49c7-f0f7-4d44-b690-ac9b566a7279", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2044663182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e239f92d55742a7bf8a5bbc33ca718b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdeb5873-5c", "ovs_interfaceid": "cdeb5873-5cdc-4cf2-9a88-ae5d1de2e02b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.019944] env[61570]: DEBUG oslo_concurrency.lockutils [req-61b5d11b-8aa6-4c7e-bdc2-38b39c949a75 req-8eef2fdf-8f6e-4436-91fc-1c0d4b625183 service nova] Releasing lock "refresh_cache-04741959-c2c4-4b38-92e7-43f941818775" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.689265] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "04741959-c2c4-4b38-92e7-43f941818775" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.764504] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.764895] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1465.754081] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1465.764524] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.764737] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.764902] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.765065] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1465.766204] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e64b4a-24f1-470d-bf14-77a87c7384ae {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.775425] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf60b9c-773a-4f59-89e9-f62aa629602b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.789575] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae4e732-00db-49c3-8dff-9ff9dd6ef4a0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.796349] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e062e40-1351-4931-b870-f46fd490fd0a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.824906] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180588MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1465.825060] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1465.825195] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.903139] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.903320] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.903449] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.903573] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.903690] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.903805] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.903921] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.904044] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.904159] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.904271] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1465.916795] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cb72b52-c3e2-46ea-8874-8832defc02ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1465.928484] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1465.939802] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1465.951235] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1465.962514] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1465.962763] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1465.962908] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '64', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_21713d88a2d5483f89ae59404d3aa235': '1', 'io_workload': '10', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_eccce396bddd42cc931ff5c0e6850b49': '1', 'num_proj_9697eba07488413b9b05222af1a8e33d': '1', 'num_proj_1453684a18b64487b99eedf9f842fd60': '1', 'num_proj_010349794e9d4aaf8248bc8855c58453': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1465.979778] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing inventories for resource provider 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1465.996053] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating ProviderTree inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1465.996053] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1466.007479] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing aggregate associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, aggregates: None {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1466.027088] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing trait associations for resource provider 
829dc000-b508-440d-ae59-f7cfbca90113, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1466.221099] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c803d2a-428c-408a-a44f-aa62cd544f37 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.229355] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27e4586-1e86-4873-9ccd-9c8d3318ef74 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.259144] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5bb572e-afef-4cba-9e29-c1b6d18b2ea2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.267341] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f07744-5bfe-4872-abb4-32089fa9a940 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.281120] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1466.290358] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1466.325155] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1466.325338] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.500s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.325611] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1467.753328] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1467.753531] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1467.753614] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1467.776876] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.777091] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.777268] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.777398] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.777523] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.777646] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c69c8589-88e1-481e-87b8-55608322440c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.777767] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.777892] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.778015] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.778139] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1467.778260] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1468.752967] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.753698] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.754214] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.754282] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.748716] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.748835] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.825968] env[61570]: WARNING oslo_vmware.rw_handles [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 
1483.825968] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1483.825968] env[61570]: ERROR oslo_vmware.rw_handles [ 1483.826595] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1483.828320] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1483.828598] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Copying Virtual Disk [datastore2] vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/83aac655-312f-4833-bfd7-cec7a0d067c6/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1483.828922] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45c57eee-1aa8-41bf-81e9-bac9a05d2708 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.837579] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 1483.837579] env[61570]: value = "task-4891391" [ 1483.837579] env[61570]: _type = "Task" [ 1483.837579] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.846123] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': task-4891391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.349346] env[61570]: DEBUG oslo_vmware.exceptions [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1484.349670] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.350344] env[61570]: ERROR nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1484.350344] env[61570]: Faults: ['InvalidArgument'] [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] yield resources [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.driver.spawn(context, instance, image_meta, [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._fetch_image_if_missing(context, vi) [ 1484.350344] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] image_cache(vi, tmp_image_ds_loc) [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] vm_util.copy_virtual_disk( [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] session._wait_for_task(vmdk_copy_task) [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.wait_for_task(task_ref) [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return evt.wait() [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] result = hub.switch() [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1484.350700] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.greenlet.switch() [ 1484.351021] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1484.351021] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.f(*self.args, **self.kw) [ 1484.351021] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1484.351021] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise exceptions.translate_fault(task_info.error) [ 1484.351021] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1484.351021] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Faults: ['InvalidArgument'] [ 1484.351021] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1484.351021] env[61570]: INFO nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Terminating instance [ 1484.352345] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.352595] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1484.352796] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-24bf49c4-8ed5-4fd4-aee4-e8f617c35148 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.355138] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1484.355334] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1484.356094] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3a51c5-1dfe-4161-a5cc-0b204a17d363 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.363673] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1484.364825] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b650cde1-74c4-450c-ae6b-4b0128f2fcbd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.366160] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1484.366332] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1484.367314] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5fc24a3-7c4f-461a-b4ec-10ca3cf533b8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.372847] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 1484.372847] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52fa36d3-122f-fd39-5251-bde7749d8071" [ 1484.372847] env[61570]: _type = "Task" [ 1484.372847] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.381331] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52fa36d3-122f-fd39-5251-bde7749d8071, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.436680] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1484.437249] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1484.437249] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Deleting the datastore file [datastore2] ddab6060-65d2-4ecc-b4ff-b57271af9d9e {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1484.437444] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f28785a-bf02-484c-a460-c98fc07593ff {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.445297] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for the task: (returnval){ [ 1484.445297] env[61570]: value = "task-4891393" [ 1484.445297] env[61570]: _type = "Task" [ 1484.445297] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.454344] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': task-4891393, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.883587] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1484.883962] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating directory with path [datastore2] vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1484.884093] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d67a684-3f80-4b99-a419-c9a3c79bef48 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.896105] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created directory with path [datastore2] vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1484.896301] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Fetch image to [datastore2] vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1484.896473] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1484.897336] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a2555c-8aee-4726-8211-a66d30a10a5d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.904160] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4249ef6b-94dc-419f-8b0c-1951d067a75c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.913427] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2053095d-1fb3-423a-b872-db4a89d4fd89 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.945388] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d537d21f-8b1f-42b7-8149-e65251e798a0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.957471] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-097cc973-afc5-4e11-823b-775ac6e3e37c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.959272] env[61570]: DEBUG oslo_vmware.api [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Task: {'id': task-4891393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086272} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.959523] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1484.959703] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1484.959877] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1484.960100] env[61570]: INFO nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Took 0.60 seconds to destroy the instance on the hypervisor. 
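
The records above all follow the same invoke-then-poll pattern used for every vCenter operation in this log (CreateVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, DeleteDatastoreFile_Task): an asynchronous *_Task method is invoked through the oslo.vmware session, a Task reference such as task-4891393 comes back, and wait_for_task() polls it until it completes or raises a translated fault. The following is a minimal sketch of that pattern written directly against oslo.vmware, not code taken from Nova; the endpoint, credentials and datastore path are placeholders, not values from this environment.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder vCenter endpoint and credentials.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'devstack', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

vim = session.vim

# Look up a datacenter managed-object reference; DeleteDatastoreFile_Task
# uses it to resolve "[datastore] path" style names.
result = session.invoke_api(vim_util, 'get_objects', vim, 'Datacenter', 100)
datacenter = result.objects[0].obj

# Start the asynchronous server-side operation; the return value is a Task
# moref comparable to the "task-4891393" polled above.
task = session.invoke_api(
    vim, 'DeleteDatastoreFile_Task', vim.service_content.fileManager,
    name='[datastore2] some-orphaned-dir',   # hypothetical datastore path
    datacenter=datacenter)

# Block until the task reports success; on failure this raises a translated
# exception out of the poll loop instead of returning.
session.wait_for_task(task)

That raise-on-fault behaviour in the poll loop is why the CopyVirtualDisk_Task failure in this log surfaces as a VimFaultException traceback out of oslo_vmware/api.py's _poll_task rather than as a partial result.
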
[ 1484.962248] env[61570]: DEBUG nova.compute.claims [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1484.962465] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.962684] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.982210] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1485.040221] env[61570]: DEBUG oslo_vmware.rw_handles [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1485.099593] env[61570]: DEBUG oslo_vmware.rw_handles [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1485.099788] env[61570]: DEBUG oslo_vmware.rw_handles [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1485.257211] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db4aac6-e354-41e1-b0f4-54456c206712 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.265743] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b7abe5-d92e-4b7f-be08-da679cb168ea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.295627] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c8511d-9108-49f5-8940-d3f336950acf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.303670] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc58391b-7a1b-4f7d-a11a-3c5b222e5c56 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.318603] env[61570]: DEBUG nova.compute.provider_tree [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.327182] env[61570]: DEBUG nova.scheduler.client.report [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1485.343516] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.380s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.344120] env[61570]: ERROR nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1485.344120] env[61570]: Faults: ['InvalidArgument'] [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.driver.spawn(context, instance, image_meta, [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._fetch_image_if_missing(context, vi) [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] image_cache(vi, tmp_image_ds_loc) [ 1485.344120] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] vm_util.copy_virtual_disk( [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] session._wait_for_task(vmdk_copy_task) [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.wait_for_task(task_ref) [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return evt.wait() [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] result = hub.switch() [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.greenlet.switch() [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1485.344506] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.f(*self.args, **self.kw) [ 1485.344809] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1485.344809] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise exceptions.translate_fault(task_info.error) [ 1485.344809] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1485.344809] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Faults: ['InvalidArgument'] [ 1485.344809] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.344809] env[61570]: DEBUG nova.compute.utils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1485.346586] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Build of instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e was re-scheduled: A specified parameter was not correct: fileType [ 1485.346586] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1485.346992] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1485.347192] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1485.347345] env[61570]: DEBUG nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1485.347507] env[61570]: DEBUG nova.network.neutron [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1485.492248] env[61570]: DEBUG neutronclient.v2_0.client [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1485.493357] env[61570]: ERROR nova.compute.manager [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.driver.spawn(context, instance, image_meta, [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._fetch_image_if_missing(context, vi) [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] image_cache(vi, tmp_image_ds_loc) [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1485.493357] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] vm_util.copy_virtual_disk( [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] session._wait_for_task(vmdk_copy_task) [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.wait_for_task(task_ref) [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return evt.wait() [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] result = hub.switch() [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.greenlet.switch() [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.f(*self.args, **self.kw) [ 1485.493720] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise exceptions.translate_fault(task_info.error) [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Faults: ['InvalidArgument'] [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] During handling of the above exception, another exception occurred: [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._build_and_run_instance(context, instance, image, [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File 
"/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise exception.RescheduledException( [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] nova.exception.RescheduledException: Build of instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e was re-scheduled: A specified parameter was not correct: fileType [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Faults: ['InvalidArgument'] [ 1485.494110] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] During handling of the above exception, another exception occurred: [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] exception_handler_v20(status_code, error_body) [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise client_exc(message=error_message, [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Neutron server returns request_ids: ['req-7ffb7344-b5bd-46ea-8983-ac26ccbf1b6c'] [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.494499] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] During handling of the above exception, another exception occurred: [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._deallocate_network(context, instance, requested_networks) [ 1485.494815] env[61570]: ERROR nova.compute.manager 
[instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.network_api.deallocate_for_instance( [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] data = neutron.list_ports(**search_opts) [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.list('ports', self.ports_path, retrieve_all, [ 1485.494815] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] for r in self._pagination(collection, path, **params): [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] res = self.get(path, params=params) [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.retry_request("GET", action, body=body, [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1485.495239] env[61570]: ERROR nova.compute.manager [instance: 
ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.do_request(method, action, body=body, [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._handle_fault_response(status_code, replybody, resp) [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise exception.Unauthorized() [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] nova.exception.Unauthorized: Not authorized. [ 1485.495577] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1485.548743] env[61570]: INFO nova.scheduler.client.report [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Deleted allocations for instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e [ 1485.582320] env[61570]: DEBUG oslo_concurrency.lockutils [None req-3012bb5e-ea4d-41f6-8370-81abe67f9fd3 tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.836s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.583699] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 441.426s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.584258] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.584901] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.584901] env[61570]: DEBUG 
oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.586752] env[61570]: INFO nova.compute.manager [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Terminating instance [ 1485.588613] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquiring lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.588801] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Acquired lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.589080] env[61570]: DEBUG nova.network.neutron [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1485.596036] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 9711e15c-ef00-47c4-afc3-b5fb93277c63] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1485.624780] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 9711e15c-ef00-47c4-afc3-b5fb93277c63] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1485.652973] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "9711e15c-ef00-47c4-afc3-b5fb93277c63" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.143s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.668666] env[61570]: DEBUG nova.compute.manager [None req-df91f14b-7087-4aa6-af20-04d1bfd682b8 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 7cb72b52-c3e2-46ea-8874-8832defc02ea] Starting instance... 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1485.697576] env[61570]: DEBUG nova.compute.manager [None req-df91f14b-7087-4aa6-af20-04d1bfd682b8 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 7cb72b52-c3e2-46ea-8874-8832defc02ea] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1485.721767] env[61570]: DEBUG oslo_concurrency.lockutils [None req-df91f14b-7087-4aa6-af20-04d1bfd682b8 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "7cb72b52-c3e2-46ea-8874-8832defc02ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 195.782s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.732615] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1485.798267] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.798534] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.800166] env[61570]: INFO nova.compute.claims [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1485.987623] env[61570]: DEBUG nova.network.neutron [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Updating instance_info_cache with network_info: [{"id": "31f1b2a2-c635-4000-b033-829745ea0fea", "address": "fa:16:3e:a2:bb:d9", "network": {"id": "613598d6-54d8-4849-a54b-6c40ce8fce47", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9026853fead54dcba8cbc4240690b9df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31f1b2a2-c6", "ovs_interfaceid": "31f1b2a2-c635-4000-b033-829745ea0fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.999836] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Releasing lock "refresh_cache-ddab6060-65d2-4ecc-b4ff-b57271af9d9e" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.000204] env[61570]: DEBUG nova.compute.manager [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1486.000323] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1486.001021] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7bf5845e-5c20-4a9b-a8da-06a4725f3f16 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.013204] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1445867-9f3f-4b0b-bcc1-6d3a154a56e6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.037421] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37267a03-2747-4f05-984e-c45e188442f4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.052330] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ddab6060-65d2-4ecc-b4ff-b57271af9d9e could not be found. [ 1486.052530] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1486.053197] env[61570]: INFO nova.compute.manager [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1486.053197] env[61570]: DEBUG oslo.service.loopingcall [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.053697] env[61570]: DEBUG nova.compute.manager [-] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1486.053813] env[61570]: DEBUG nova.network.neutron [-] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1486.058637] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a1ad00-86f6-4332-bc53-f63ba7eb75f2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.091544] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903bd5ba-6413-43b2-a35c-b29a34fd66f6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.100212] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3ab932-888b-4225-a210-a3018c68c06b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.114897] env[61570]: DEBUG nova.compute.provider_tree [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1486.124412] env[61570]: DEBUG nova.scheduler.client.report [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1486.140253] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.341s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.140595] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Start building networks asynchronously for 
instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1486.175959] env[61570]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1486.176275] env[61570]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-4601e8e1-e02c-4ec5-bb93-cf47838be98e'] [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1486.176798] env[61570]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall 
self.network_api.deallocate_for_instance( [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1486.177244] env[61570]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1486.177702] env[61570]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1486.177702] env[61570]: ERROR oslo.service.loopingcall [ 1486.178113] env[61570]: ERROR nova.compute.manager [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1486.203170] env[61570]: DEBUG nova.compute.utils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1486.206029] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1486.206029] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1486.213345] env[61570]: ERROR nova.compute.manager [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] exception_handler_v20(status_code, error_body) [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise client_exc(message=error_message, [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Neutron server returns request_ids: ['req-4601e8e1-e02c-4ec5-bb93-cf47838be98e'] [ 1486.213345] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] During handling of the above exception, another exception occurred: [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Traceback (most recent call last): [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._delete_instance(context, instance, bdms) [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._shutdown_instance(context, instance, bdms) [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._try_deallocate_network(context, instance, requested_networks) [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] with excutils.save_and_reraise_exception(): [ 1486.213940] env[61570]: ERROR 
nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1486.213940] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.force_reraise() [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise self.value [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] _deallocate_network_with_retries() [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return evt.wait() [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] result = hub.switch() [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.greenlet.switch() [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1486.214356] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] result = func(*self.args, **self.kw) [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] result = f(*args, **kwargs) [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._deallocate_network( [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self.network_api.deallocate_for_instance( [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: 
ddab6060-65d2-4ecc-b4ff-b57271af9d9e] data = neutron.list_ports(**search_opts) [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.list('ports', self.ports_path, retrieve_all, [ 1486.214655] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] for r in self._pagination(collection, path, **params): [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] res = self.get(path, params=params) [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.retry_request("GET", action, body=body, [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1486.214973] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] return self.do_request(method, action, body=body, [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] ret = obj(*args, **kwargs) [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] self._handle_fault_response(status_code, replybody, resp) [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1486.215339] env[61570]: ERROR nova.compute.manager [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] [ 1486.217284] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1486.245899] env[61570]: DEBUG oslo_concurrency.lockutils [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.662s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.247404] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 365.033s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.247515] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1486.247669] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "ddab6060-65d2-4ecc-b4ff-b57271af9d9e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.283783] env[61570]: DEBUG nova.policy [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ba23e12dd7047c6b0e67723388911ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97bb60f83e91408c89c70c926c236a71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1486.301520] env[61570]: INFO nova.compute.manager [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] [instance: ddab6060-65d2-4ecc-b4ff-b57271af9d9e] Successfully reverted task state from None on failure for instance. [ 1486.305518] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server [None req-778b2552-5e83-4c1e-b14c-e7e3f99a51ee tempest-DeleteServersAdminTestJSON-606561492 tempest-DeleteServersAdminTestJSON-606561492-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-4601e8e1-e02c-4ec5-bb93-cf47838be98e'] [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1486.310296] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1486.310788] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1486.311287] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1486.311750] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1486.312497] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.312996] env[61570]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1486.312996] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1486.313564] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1486.313564] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1486.313564] env[61570]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1486.313564] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1486.313564] env[61570]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1486.313564] env[61570]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
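Both tracebacks record the same underlying failure: while deallocating networking for the deleted instance, neutronclient's list_ports call comes back with HTTP 401 (Unauthorized), and the wrapper in nova/network/neutron.py (the frames at lines 196 and 212 above) re-raises it as NeutronAdminCredentialConfigurationInvalid, which then escapes terminate_instance and is logged a second time by the RPC server. As the exception name suggests, a 401 on this path usually points at the service credentials Nova uses to talk to Neutron (the [neutron] section of nova.conf, or an expired/rotated service user), not at the instance being deleted. A stripped-down sketch of the translation pattern the traceback walks through follows; the exception classes are stand-ins so the snippet is self-contained, and the real wrapper contains more logic (for example, retrying the request) than is shown here.

    # Stand-ins for neutronclient.common.exceptions.Unauthorized and
    # nova.exception.NeutronAdminCredentialConfigurationInvalid.
    import functools

    class Unauthorized(Exception):
        pass

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Networking client is experiencing an unauthorized exception."""

    def translate_unauthorized(func):
        """Mirror the wrapper frames in the traceback: convert a Neutron
        401 into a Nova configuration error."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                # The service-scoped call itself was rejected, so the problem
                # is Nova's Neutron credentials, not the user's request.
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper

    @translate_unauthorized
    def list_ports(**search_opts):
        # Placeholder for the neutronclient call that fails in the log above.
        raise Unauthorized("401: The request you have made requires authentication.")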
[ 1486.313564] env[61570]: ERROR oslo_messaging.rpc.server [ 1486.334573] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1486.334836] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1486.334975] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1486.335177] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1486.335325] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1486.335475] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1486.335687] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1486.335847] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1486.336102] env[61570]: DEBUG nova.virt.hardware [None 
req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1486.336749] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1486.336749] env[61570]: DEBUG nova.virt.hardware [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1486.337409] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3732521-5e03-419e-a7f7-4329b446d044 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.346704] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36898172-9301-4bb2-a01b-407438d90a6e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.417870] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "1b211472-f426-4e7f-8f7a-70564c84e59b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.418161] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.742270] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Successfully created port: 4d858952-744f-47b4-913c-5aea5ec01a44 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.339493] env[61570]: DEBUG nova.compute.manager [req-5534ab4e-3422-45e3-bbc6-041b4572f26b req-bb6bf5da-0e5c-40b4-9517-20958ee1089e service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Received event network-vif-plugged-4d858952-744f-47b4-913c-5aea5ec01a44 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1487.339798] env[61570]: DEBUG oslo_concurrency.lockutils [req-5534ab4e-3422-45e3-bbc6-041b4572f26b req-bb6bf5da-0e5c-40b4-9517-20958ee1089e service nova] Acquiring lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.339909] env[61570]: DEBUG oslo_concurrency.lockutils [req-5534ab4e-3422-45e3-bbc6-041b4572f26b req-bb6bf5da-0e5c-40b4-9517-20958ee1089e service nova] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.340230] env[61570]: DEBUG oslo_concurrency.lockutils [req-5534ab4e-3422-45e3-bbc6-041b4572f26b req-bb6bf5da-0e5c-40b4-9517-20958ee1089e service nova] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.340394] env[61570]: DEBUG nova.compute.manager [req-5534ab4e-3422-45e3-bbc6-041b4572f26b req-bb6bf5da-0e5c-40b4-9517-20958ee1089e service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] No waiting events found dispatching network-vif-plugged-4d858952-744f-47b4-913c-5aea5ec01a44 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1487.340557] env[61570]: WARNING nova.compute.manager [req-5534ab4e-3422-45e3-bbc6-041b4572f26b req-bb6bf5da-0e5c-40b4-9517-20958ee1089e service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Received unexpected event network-vif-plugged-4d858952-744f-47b4-913c-5aea5ec01a44 for instance with vm_state building and task_state spawning. [ 1487.433130] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Successfully updated port: 4d858952-744f-47b4-913c-5aea5ec01a44 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1487.449657] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "refresh_cache-fe2fe70e-6a16-4b74-9766-583f8ca87dd3" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.449814] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquired lock "refresh_cache-fe2fe70e-6a16-4b74-9766-583f8ca87dd3" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.449967] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1487.502985] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1487.767227] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Updating instance_info_cache with network_info: [{"id": "4d858952-744f-47b4-913c-5aea5ec01a44", "address": "fa:16:3e:66:ff:ea", "network": {"id": "c06c2b37-b69b-4e10-9449-3e052d69c3ca", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1389306940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97bb60f83e91408c89c70c926c236a71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d858952-74", "ovs_interfaceid": "4d858952-744f-47b4-913c-5aea5ec01a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.782256] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Releasing lock "refresh_cache-fe2fe70e-6a16-4b74-9766-583f8ca87dd3" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.782523] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Instance network_info: |[{"id": "4d858952-744f-47b4-913c-5aea5ec01a44", "address": "fa:16:3e:66:ff:ea", "network": {"id": "c06c2b37-b69b-4e10-9449-3e052d69c3ca", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1389306940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97bb60f83e91408c89c70c926c236a71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d858952-74", "ovs_interfaceid": "4d858952-744f-47b4-913c-5aea5ec01a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1487.782940] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:ff:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d858952-744f-47b4-913c-5aea5ec01a44', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1487.791194] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Creating folder: Project (97bb60f83e91408c89c70c926c236a71). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1487.791796] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-653825dd-d1a3-4057-b59b-a7293dd79c5a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.802762] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Created folder: Project (97bb60f83e91408c89c70c926c236a71) in parent group-v953072. [ 1487.802976] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Creating folder: Instances. Parent ref: group-v953156. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1487.803255] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0e0b799-7a00-4843-8d94-94fafa2fc7e6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.813473] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Created folder: Instances in parent group-v953156. [ 1487.813733] env[61570]: DEBUG oslo.service.loopingcall [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1487.813924] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1487.814154] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7344f67-f682-4ea8-8f51-1c315f709c7f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.834591] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1487.834591] env[61570]: value = "task-4891396" [ 1487.834591] env[61570]: _type = "Task" [ 1487.834591] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.849677] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891396, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.344817] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891396, 'name': CreateVM_Task, 'duration_secs': 0.345498} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.345112] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1488.345689] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.345861] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.346231] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1488.346494] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e936fc79-d9b7-4a10-bb32-52f9b2df07a9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.352361] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Waiting for the task: (returnval){ [ 1488.352361] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]525cb27c-bf46-a329-b0da-eefe02381fd7" [ 1488.352361] env[61570]: _type = "Task" [ 1488.352361] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.360275] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]525cb27c-bf46-a329-b0da-eefe02381fd7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.863685] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.863951] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1488.864187] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.364373] env[61570]: DEBUG nova.compute.manager [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Received event network-changed-4d858952-744f-47b4-913c-5aea5ec01a44 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1489.364610] env[61570]: DEBUG nova.compute.manager [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Refreshing instance network info cache due to event network-changed-4d858952-744f-47b4-913c-5aea5ec01a44. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1489.364732] env[61570]: DEBUG oslo_concurrency.lockutils [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] Acquiring lock "refresh_cache-fe2fe70e-6a16-4b74-9766-583f8ca87dd3" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.364880] env[61570]: DEBUG oslo_concurrency.lockutils [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] Acquired lock "refresh_cache-fe2fe70e-6a16-4b74-9766-583f8ca87dd3" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.365050] env[61570]: DEBUG nova.network.neutron [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Refreshing network info cache for port 4d858952-744f-47b4-913c-5aea5ec01a44 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1489.665124] env[61570]: DEBUG nova.network.neutron [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Updated VIF entry in instance network info cache for port 4d858952-744f-47b4-913c-5aea5ec01a44. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1489.665500] env[61570]: DEBUG nova.network.neutron [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Updating instance_info_cache with network_info: [{"id": "4d858952-744f-47b4-913c-5aea5ec01a44", "address": "fa:16:3e:66:ff:ea", "network": {"id": "c06c2b37-b69b-4e10-9449-3e052d69c3ca", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1389306940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97bb60f83e91408c89c70c926c236a71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d858952-74", "ovs_interfaceid": "4d858952-744f-47b4-913c-5aea5ec01a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.676972] env[61570]: DEBUG oslo_concurrency.lockutils [req-235b0c29-88a1-472a-ad00-683b57244547 req-41599452-3be0-466e-9c54-6eb4f80362ab service nova] Releasing lock "refresh_cache-fe2fe70e-6a16-4b74-9766-583f8ca87dd3" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.753615] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.754017] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1526.752588] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1526.765379] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.765676] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.765764] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.765916] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1526.767064] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639e4aa7-a4d6-4e46-998f-880b6287470e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.776332] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e259a2-bfd6-48cc-b71b-49a9948373cb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.792317] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b5b4ce-9ec4-467d-8dc8-881319d223db {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.799431] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cdfe3d-dbac-475c-a6a5-dce16dff19e7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.830837] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180600MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1526.830993] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1526.831216] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.916747] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.916922] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.917061] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f9d0b44c-a338-495e-8ed2-9c79813671fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.917277] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.917444] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.917576] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.917697] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.917812] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.917925] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.918053] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1526.929307] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1526.941168] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1526.952523] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1526.963609] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1526.963918] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1526.964011] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '67', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'io_workload': '10', 'num_proj_41f716cba1d94cf28a341cc027112585': '1', 'num_proj_eccce396bddd42cc931ff5c0e6850b49': '1', 'num_proj_9697eba07488413b9b05222af1a8e33d': '1', 'num_proj_1453684a18b64487b99eedf9f842fd60': '1', 'num_proj_010349794e9d4aaf8248bc8855c58453': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_task_spawning': '1', 'num_proj_97bb60f83e91408c89c70c926c236a71': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1527.143711] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18eea56c-dc0c-4b8d-bfc4-18c76aae9f95 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.151830] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e381348-ff8b-4b0b-b9e1-8e22dd51a1dd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.181275] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39111314-40ce-44d3-a1de-2ff63e949d5e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.189317] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0d81cc-72e2-46d3-aa9d-034d4ba980f3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.204376] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.212891] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.229763] env[61570]: DEBUG 
nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1527.229964] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.399s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.231073] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.753232] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.753406] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1528.753522] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1528.774405] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.774547] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.774684] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.774813] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.774938] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c69c8589-88e1-481e-87b8-55608322440c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.775072] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.775198] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.775318] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.775434] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.775550] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1528.775672] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1529.753471] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.753882] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.753688] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.754090] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.749346] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.072263] env[61570]: WARNING oslo_vmware.rw_handles [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1534.072263] 
env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1534.072263] env[61570]: ERROR oslo_vmware.rw_handles [ 1534.072778] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1534.074585] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1534.074841] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Copying Virtual Disk [datastore2] vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/5e8f3088-728b-40c4-ad39-f93a8e67ca50/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1534.075139] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31932e7e-60d3-4178-9ff7-53a3af051c24 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.083976] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 1534.083976] env[61570]: value = "task-4891397" [ 1534.083976] env[61570]: _type = "Task" [ 1534.083976] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.094913] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891397, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.595179] env[61570]: DEBUG oslo_vmware.exceptions [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1534.595479] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.596073] env[61570]: ERROR nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1534.596073] env[61570]: Faults: ['InvalidArgument'] [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Traceback (most recent call last): [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] yield resources [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] self.driver.spawn(context, instance, image_meta, [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] self._fetch_image_if_missing(context, vi) [ 1534.596073] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] image_cache(vi, tmp_image_ds_loc) [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 
60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] vm_util.copy_virtual_disk( [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] session._wait_for_task(vmdk_copy_task) [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] return self.wait_for_task(task_ref) [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] return evt.wait() [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] result = hub.switch() [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1534.596685] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] return self.greenlet.switch() [ 1534.597214] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1534.597214] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] self.f(*self.args, **self.kw) [ 1534.597214] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1534.597214] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] raise exceptions.translate_fault(task_info.error) [ 1534.597214] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1534.597214] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Faults: ['InvalidArgument'] [ 1534.597214] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] [ 1534.597214] env[61570]: INFO nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Terminating instance [ 1534.598019] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.598252] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.598501] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0520f300-ebc1-463f-8376-b5ba5a8ae2b9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.601079] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1534.601272] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1534.602007] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc19199e-2b12-4ad5-b0e9-26fbfe4ca062 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.609344] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1534.609616] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff86d7bf-7803-4c5a-8493-316b401bc3f2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.612033] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.612207] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1534.613222] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f7f9c3-b5a8-46de-9dba-6f44ad82403a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.618310] env[61570]: DEBUG oslo_vmware.api [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for the task: (returnval){ [ 1534.618310] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]523be79b-0fb9-bd12-b3a8-029ed8841158" [ 1534.618310] env[61570]: _type = "Task" [ 1534.618310] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.625306] env[61570]: DEBUG oslo_vmware.api [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]523be79b-0fb9-bd12-b3a8-029ed8841158, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.686934] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1534.687189] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1534.687379] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleting the datastore file [datastore2] 60ecef78-bcc2-42ab-bdba-83e8009dbe98 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1534.687691] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59d142c8-f48f-4259-af97-c11b9b3e33c5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.694149] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 1534.694149] env[61570]: value = "task-4891399" [ 1534.694149] env[61570]: _type = "Task" [ 1534.694149] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.702330] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891399, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.129390] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1535.129766] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Creating directory with path [datastore2] vmware_temp/1415046d-bd4b-455e-9fe8-c2bab7c44b65/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1535.129922] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51f2e86d-05ec-458f-94c4-ff20a70e98eb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.142972] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Created directory with path [datastore2] vmware_temp/1415046d-bd4b-455e-9fe8-c2bab7c44b65/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1535.143168] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Fetch image to [datastore2] vmware_temp/1415046d-bd4b-455e-9fe8-c2bab7c44b65/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1535.143339] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/1415046d-bd4b-455e-9fe8-c2bab7c44b65/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1535.144143] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbd8f1f-2679-4169-bd3e-e98759f35c0c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.151389] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1982c9fc-c965-4488-b1ae-481802123e48 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.160661] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb387ab4-a1ba-4a73-8fd1-bfa1d6cbe188 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.192102] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b7fcc7-08c8-47cc-8fd0-35df8947e3de {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.203757] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-da449703-6486-4bd1-8b08-1a5776f47376 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.205510] env[61570]: DEBUG oslo_vmware.api [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891399, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081976} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.205752] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1535.205929] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1535.206110] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1535.206284] env[61570]: INFO nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1535.208438] env[61570]: DEBUG nova.compute.claims [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1535.208616] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.208828] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.232193] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1535.404378] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.405169] env[61570]: ERROR nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
[ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = getattr(controller, method)(*args, **kwargs) [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._get(image_id) [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1535.405169] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] resp, body = self.http_client.get(url, headers=header) [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.request(url, 'GET', **kwargs) [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._handle_response(resp) [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exc.from_response(resp, resp.content) [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] During handling of the above exception, another exception occurred: [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1535.405531] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] yield resources [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self.driver.spawn(context, instance, image_meta, [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._fetch_image_if_missing(context, vi) [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image_fetch(context, vi, tmp_image_ds_loc) [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] images.fetch_image( [ 1535.405824] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] metadata = IMAGE_API.get(context, image_ref) [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return session.show(context, image_id, [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] _reraise_translated_image_exception(image_id) [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise new_exc.with_traceback(exc_trace) [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = getattr(controller, method)(*args, **kwargs) [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1535.406442] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._get(image_id) [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] resp, body = self.http_client.get(url, headers=header) [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.request(url, 'GET', **kwargs) [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._handle_response(resp) [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exc.from_response(resp, resp.content) [ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
[ 1535.406937] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1535.407466] env[61570]: INFO nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Terminating instance [ 1535.409007] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.409239] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1535.411960] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76265a8a-19d4-4516-a64e-19d11bb5ba30 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.414751] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1535.414990] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1535.415718] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad3d443-4261-43d7-8082-443a0b453ae3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.423545] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1535.423545] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f035aed-bf4b-4adc-b444-69b7d8044d1d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.425841] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1535.426031] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 
tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1535.429731] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85966a93-ea69-4bf8-b405-e0b88973da47 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.436738] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Waiting for the task: (returnval){ [ 1535.436738] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526fe5e7-2753-5a2d-569b-99b00fd22dee" [ 1535.436738] env[61570]: _type = "Task" [ 1535.436738] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.445916] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]526fe5e7-2753-5a2d-569b-99b00fd22dee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.472723] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423b148b-9c57-431c-a279-77d3d787d76a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.480648] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae4eaa9-9c0c-423c-aa79-934f8f569bdd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.487861] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1535.488015] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1535.488202] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Deleting the datastore file [datastore2] 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1535.512882] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df498353-3507-433b-af03-36bf618fa77b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.515784] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f08e0e-5dc1-4bbc-8c67-0d105df4c241 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.525023] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9b1990-5d86-4a5b-b00b-694354b16a7f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.529110] env[61570]: DEBUG oslo_vmware.api [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for the task: (returnval){ [ 1535.529110] env[61570]: value = "task-4891401" [ 1535.529110] env[61570]: _type = "Task" [ 1535.529110] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.540388] env[61570]: DEBUG nova.compute.provider_tree [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1535.546786] env[61570]: DEBUG oslo_vmware.api [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': task-4891401, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.549896] env[61570]: DEBUG nova.scheduler.client.report [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1535.564383] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.355s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.564910] env[61570]: ERROR nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1535.564910] env[61570]: Faults: ['InvalidArgument'] [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Traceback (most recent call last): [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 
60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] self.driver.spawn(context, instance, image_meta, [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] self._fetch_image_if_missing(context, vi) [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] image_cache(vi, tmp_image_ds_loc) [ 1535.564910] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] vm_util.copy_virtual_disk( [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] session._wait_for_task(vmdk_copy_task) [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] return self.wait_for_task(task_ref) [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] return evt.wait() [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] result = hub.switch() [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] return self.greenlet.switch() [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1535.565322] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] 
self.f(*self.args, **self.kw) [ 1535.565660] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1535.565660] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] raise exceptions.translate_fault(task_info.error) [ 1535.565660] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1535.565660] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Faults: ['InvalidArgument'] [ 1535.565660] env[61570]: ERROR nova.compute.manager [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] [ 1535.565660] env[61570]: DEBUG nova.compute.utils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1535.567247] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Build of instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 was re-scheduled: A specified parameter was not correct: fileType [ 1535.567247] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1535.567714] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1535.567850] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1535.568036] env[61570]: DEBUG nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1535.568205] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1535.894978] env[61570]: DEBUG nova.network.neutron [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.913208] env[61570]: INFO nova.compute.manager [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Took 0.34 seconds to deallocate network for instance. [ 1535.948629] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1535.948795] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Creating directory with path [datastore2] vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1535.948981] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c84aaaf-efcd-4f8a-a6dd-4dd186be1766 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.963163] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Created directory with path [datastore2] vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1535.963999] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Fetch image to [datastore2] vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1535.963999] 
env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1535.964576] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83893d36-c64e-4f32-a79d-4dedee17d027 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.973507] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7afc376-6aef-4c6d-aa1a-6e7726067613 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.984158] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fee69a-4471-4fcc-ad2d-b68c4f755571 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.021762] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f097accb-860c-401a-8c34-3851555cb4f8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.033687] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e8b750a3-14c0-4f23-9ed8-a617ac8fe63e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.042038] env[61570]: DEBUG oslo_vmware.api [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Task: {'id': task-4891401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080834} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.042325] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1536.043045] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1536.043045] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1536.043294] env[61570]: INFO nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1536.045957] env[61570]: DEBUG nova.compute.claims [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1536.046149] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.046368] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.057281] env[61570]: INFO nova.scheduler.client.report [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleted allocations for instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 [ 1536.066293] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1536.128204] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9aef2a72-4014-4e58-8d4f-c94174577eae tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock 
"60ecef78-bcc2-42ab-bdba-83e8009dbe98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 638.791s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.129290] env[61570]: DEBUG oslo_vmware.rw_handles [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1536.132037] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 442.553s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.132037] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.132037] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.132599] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.135856] env[61570]: INFO nova.compute.manager [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Terminating instance [ 1536.192033] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.192229] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af 
tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.192405] env[61570]: DEBUG nova.network.neutron [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1536.193505] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1536.201563] env[61570]: DEBUG oslo_vmware.rw_handles [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1536.201819] env[61570]: DEBUG oslo_vmware.rw_handles [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1536.251384] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.267568] env[61570]: DEBUG nova.network.neutron [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1536.365896] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92308e76-ce61-4178-b286-df960a407927 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.376549] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5aad22c-2578-4f1e-8d75-ec5b33b5ddb9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.405300] env[61570]: DEBUG nova.network.neutron [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.407026] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ba3347-1b7c-49c1-a96c-3641573a20a1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.416206] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ab8194-5a26-45d6-8fba-24da134e457d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.420349] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "refresh_cache-60ecef78-bcc2-42ab-bdba-83e8009dbe98" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.420477] env[61570]: DEBUG nova.compute.manager [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1536.420588] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1536.422744] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5575b4f2-56f8-47a1-9951-40200fb7f609 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.432438] env[61570]: DEBUG nova.compute.provider_tree [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.439983] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d93fe66-d355-4a3e-a5f2-abd3f0ae0480 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.451209] env[61570]: DEBUG nova.scheduler.client.report [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.477243] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 60ecef78-bcc2-42ab-bdba-83e8009dbe98 could not be found. [ 1536.477485] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1536.477701] env[61570]: INFO nova.compute.manager [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1536.477977] env[61570]: DEBUG oslo.service.loopingcall [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1536.478689] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.432s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.479386] env[61570]: ERROR nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = getattr(controller, method)(*args, **kwargs) [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._get(image_id) [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1536.479386] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] resp, body = self.http_client.get(url, headers=header) [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.request(url, 'GET', **kwargs) [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._handle_response(resp) [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 
9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exc.from_response(resp, resp.content) [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] During handling of the above exception, another exception occurred: [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.479729] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self.driver.spawn(context, instance, image_meta, [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._fetch_image_if_missing(context, vi) [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image_fetch(context, vi, tmp_image_ds_loc) [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] images.fetch_image( [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] metadata = IMAGE_API.get(context, image_ref) [ 1536.480056] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return session.show(context, image_id, [ 1536.480404] env[61570]: ERROR 
nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] _reraise_translated_image_exception(image_id) [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise new_exc.with_traceback(exc_trace) [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = getattr(controller, method)(*args, **kwargs) [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._get(image_id) [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1536.480404] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] resp, body = self.http_client.get(url, headers=header) [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.request(url, 'GET', **kwargs) [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._handle_response(resp) [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exc.from_response(resp, resp.content) [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] nova.exception.ImageNotAuthorized: Not authorized for 
image 64ba497f-0d92-47de-bece-8112101951ad. [ 1536.480822] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.480822] env[61570]: DEBUG nova.compute.utils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1536.481368] env[61570]: DEBUG nova.compute.manager [-] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1536.481470] env[61570]: DEBUG nova.network.neutron [-] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1536.484787] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Build of instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 was re-scheduled: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1536.484787] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1536.484787] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1536.484787] env[61570]: DEBUG nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1536.486186] env[61570]: DEBUG nova.network.neutron [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1536.486186] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.234s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.487095] env[61570]: INFO nova.compute.claims [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1536.518438] env[61570]: DEBUG nova.network.neutron [-] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1536.527093] env[61570]: DEBUG nova.network.neutron [-] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.535948] env[61570]: INFO nova.compute.manager [-] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] Took 0.05 seconds to deallocate network for instance. [ 1536.617306] env[61570]: DEBUG neutronclient.v2_0.client [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1536.618633] env[61570]: ERROR nova.compute.manager [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = getattr(controller, method)(*args, **kwargs) [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._get(image_id) [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1536.618633] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] resp, body = self.http_client.get(url, headers=header) [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.request(url, 'GET', **kwargs) [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._handle_response(resp) [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exc.from_response(resp, resp.content) [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] During handling of the above exception, another exception occurred: [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.619020] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self.driver.spawn(context, instance, image_meta, [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._fetch_image_if_missing(context, vi) [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image_fetch(context, vi, tmp_image_ds_loc) [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] images.fetch_image( [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] metadata = IMAGE_API.get(context, image_ref) [ 1536.619375] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return session.show(context, image_id, [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] _reraise_translated_image_exception(image_id) [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise new_exc.with_traceback(exc_trace) [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 
9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = getattr(controller, method)(*args, **kwargs) [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._get(image_id) [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1536.619738] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] resp, body = self.http_client.get(url, headers=header) [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.request(url, 'GET', **kwargs) [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self._handle_response(resp) [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exc.from_response(resp, resp.content) [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] nova.exception.ImageNotAuthorized: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. 
[ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] During handling of the above exception, another exception occurred: [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.620109] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._build_and_run_instance(context, instance, image, [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exception.RescheduledException( [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] nova.exception.RescheduledException: Build of instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 was re-scheduled: Not authorized for image 64ba497f-0d92-47de-bece-8112101951ad. [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] During handling of the above exception, another exception occurred: [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1536.620473] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] exception_handler_v20(status_code, error_body) [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise client_exc(message=error_message, [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Neutron server returns request_ids: ['req-bd63ca80-c940-47a2-9441-f229a824c515'] [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 
9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] During handling of the above exception, another exception occurred: [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._deallocate_network(context, instance, requested_networks) [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self.network_api.deallocate_for_instance( [ 1536.620828] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] data = neutron.list_ports(**search_opts) [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.list('ports', self.ports_path, retrieve_all, [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] for r in self._pagination(collection, path, **params): [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] res = self.get(path, params=params) [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.621844] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 
9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.retry_request("GET", action, body=body, [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.do_request(method, action, body=body, [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._handle_fault_response(status_code, replybody, resp) [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exception.Unauthorized() [ 1536.622268] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] nova.exception.Unauthorized: Not authorized. [ 1536.622795] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.678126] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0be707bb-8a09-4137-a7e0-ffe2d006f6af tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.546s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.678869] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 415.465s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.679069] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 60ecef78-bcc2-42ab-bdba-83e8009dbe98] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1536.679244] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "60ecef78-bcc2-42ab-bdba-83e8009dbe98" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.703678] env[61570]: INFO nova.scheduler.client.report [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Deleted allocations for instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 [ 1536.728032] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e036e971-d015-4cde-abbc-8f5454208d03 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.680s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.729071] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.240s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.729289] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Acquiring lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.729513] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.729688] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.731665] env[61570]: INFO nova.compute.manager [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Terminating instance [ 1536.733377] env[61570]: DEBUG nova.compute.manager [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1536.733569] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1536.734059] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a2bb5a9-a771-4b5b-8460-3c1f585494f3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.739811] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c5da8f-7d5c-40f1-b84a-6250333f886b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.743880] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1536.749097] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5274ddd-1485-4402-b39f-f1108c6d9a10 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.764434] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59e2776-7edf-4f1d-8a4b-cb93da9ef64e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.807291] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993fc1c1-5601-4999-aca6-dce424b0eb3a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.810063] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9cf357dd-3b71-4c76-8feb-04b9145dd4f4 could not be found. [ 1536.810303] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1536.810405] env[61570]: INFO nova.compute.manager [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1536.810650] env[61570]: DEBUG oslo.service.loopingcall [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1536.812978] env[61570]: DEBUG nova.compute.manager [-] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1536.812978] env[61570]: DEBUG nova.network.neutron [-] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1536.820902] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cc4a1d-51f9-4562-bd9c-574e4f4c09b6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.836276] env[61570]: DEBUG nova.compute.provider_tree [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.837776] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.845864] env[61570]: DEBUG nova.scheduler.client.report [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.862251] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.376s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.862739] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1536.866509] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.030s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.867852] env[61570]: INFO nova.compute.claims [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1536.897522] env[61570]: DEBUG nova.compute.utils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1536.898819] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1536.898994] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1536.908764] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1536.914439] env[61570]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1536.914669] env[61570]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1536.915185] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-ee493c40-f9ee-4fe9-a973-9fb86edd59eb'] [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1536.915185] env[61570]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1536.915749] env[61570]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1536.915749] env[61570]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1536.916286] env[61570]: ERROR oslo.service.loopingcall [ 1536.916799] env[61570]: ERROR nova.compute.manager [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1536.953556] env[61570]: ERROR nova.compute.manager [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] exception_handler_v20(status_code, error_body) [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise client_exc(message=error_message, [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Neutron server returns request_ids: ['req-ee493c40-f9ee-4fe9-a973-9fb86edd59eb'] [ 1536.953556] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] During handling of the above exception, another exception occurred: [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Traceback (most recent call last): [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._delete_instance(context, instance, bdms) [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._shutdown_instance(context, instance, bdms) [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._try_deallocate_network(context, instance, requested_networks) [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] with excutils.save_and_reraise_exception(): [ 1536.953946] env[61570]: ERROR 
nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1536.953946] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self.force_reraise() [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise self.value [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] _deallocate_network_with_retries() [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return evt.wait() [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = hub.switch() [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.greenlet.switch() [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1536.954283] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = func(*self.args, **self.kw) [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] result = f(*args, **kwargs) [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._deallocate_network( [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self.network_api.deallocate_for_instance( [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 
9cf357dd-3b71-4c76-8feb-04b9145dd4f4] data = neutron.list_ports(**search_opts) [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.list('ports', self.ports_path, retrieve_all, [ 1536.954627] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] for r in self._pagination(collection, path, **params): [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] res = self.get(path, params=params) [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.retry_request("GET", action, body=body, [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1536.954979] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] return self.do_request(method, action, body=body, [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] ret = obj(*args, **kwargs) [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] self._handle_fault_response(status_code, replybody, resp) [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1536.955302] env[61570]: ERROR nova.compute.manager [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] [ 1536.971202] env[61570]: DEBUG nova.policy [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bac1d2133034dd7926fed738dfffe37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '977a17d2733049fa8200053e72fc086c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1536.976713] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1536.982508] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Lock "9cf357dd-3b71-4c76-8feb-04b9145dd4f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.253s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.003698] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1537.003936] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1537.004330] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1537.004610] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1537.004798] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1537.005015] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1537.005285] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae 
tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1537.005485] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1537.005693] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1537.005867] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1537.006055] env[61570]: DEBUG nova.virt.hardware [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1537.006896] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dea7865-8f12-4592-a5f2-e9033d52816e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.021197] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477831e4-cd98-47a1-b48b-d29bb7b3e122 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.042461] env[61570]: INFO nova.compute.manager [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] [instance: 9cf357dd-3b71-4c76-8feb-04b9145dd4f4] Successfully reverted task state from None on failure for instance. [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server [None req-ac61376f-3f8f-47b5-afde-f6a0987493e2 tempest-MigrationsAdminTest-2128029456 tempest-MigrationsAdminTest-2128029456-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-ee493c40-f9ee-4fe9-a973-9fb86edd59eb'] [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1537.046289] env[61570]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1537.046812] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1537.047580] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1537.048115] env[61570]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1537.048682] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1537.048682] env[61570]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1537.049171] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1537.049818] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1537.049818] env[61570]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1537.049818] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1537.049818] env[61570]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1537.049818] env[61570]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1537.049818] env[61570]: ERROR oslo_messaging.rpc.server [ 1537.100088] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4090c05-77ba-4b76-9a0d-45de2769fd2e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.108401] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0e9e5b-1b33-4324-aff0-79e891901b94 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.141445] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8868fe-e0b5-4b9a-92d3-ebc4348584d6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.150197] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478fcd9a-1567-48b6-bc75-8467a32b6e38 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.165739] env[61570]: DEBUG nova.compute.provider_tree [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1537.174640] env[61570]: DEBUG nova.scheduler.client.report [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1537.193447] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.194734] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1537.238498] env[61570]: DEBUG nova.compute.utils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1537.239896] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1537.240108] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1537.255519] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1537.319758] env[61570]: DEBUG nova.policy [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'faa3004e73e241b5b638d976ad086925', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1325c2eb2c3a40e18a473bd0c4cb7bad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1537.333914] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1537.364564] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1537.364848] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1537.365015] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1537.365213] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1537.365360] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1537.365534] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1537.365746] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1537.365981] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1537.366321] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Got 1 
possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1537.366526] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1537.366705] env[61570]: DEBUG nova.virt.hardware [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1537.367643] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaf8371-211e-4ac7-aec2-9232c93b89d2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.377853] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224f63f1-97d7-4c16-8964-5ea102d9522f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.498157] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Successfully created port: 323b704b-5155-465f-a722-ef091352393f {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1537.878330] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Successfully created port: 524ba31d-8e5f-420b-ae82-e5ccf5d5cad3 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1538.228307] env[61570]: DEBUG nova.compute.manager [req-87bb6023-2d81-4803-b087-7ae2eb74a0c5 req-119c6598-da6e-4b7c-b149-2c7f34829991 service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Received event network-vif-plugged-323b704b-5155-465f-a722-ef091352393f {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1538.228577] env[61570]: DEBUG oslo_concurrency.lockutils [req-87bb6023-2d81-4803-b087-7ae2eb74a0c5 req-119c6598-da6e-4b7c-b149-2c7f34829991 service nova] Acquiring lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.228726] env[61570]: DEBUG oslo_concurrency.lockutils [req-87bb6023-2d81-4803-b087-7ae2eb74a0c5 req-119c6598-da6e-4b7c-b149-2c7f34829991 service nova] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.228897] env[61570]: DEBUG oslo_concurrency.lockutils [req-87bb6023-2d81-4803-b087-7ae2eb74a0c5 req-119c6598-da6e-4b7c-b149-2c7f34829991 service nova] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.229236] env[61570]: DEBUG nova.compute.manager [req-87bb6023-2d81-4803-b087-7ae2eb74a0c5 req-119c6598-da6e-4b7c-b149-2c7f34829991 service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] No waiting events found dispatching network-vif-plugged-323b704b-5155-465f-a722-ef091352393f {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1538.229402] env[61570]: WARNING nova.compute.manager [req-87bb6023-2d81-4803-b087-7ae2eb74a0c5 req-119c6598-da6e-4b7c-b149-2c7f34829991 service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Received unexpected event network-vif-plugged-323b704b-5155-465f-a722-ef091352393f for instance with vm_state building and task_state spawning. [ 1538.427874] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Successfully updated port: 323b704b-5155-465f-a722-ef091352393f {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1538.444981] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "refresh_cache-1ed132c9-9efe-4a40-b4da-308a7b23bb42" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.445165] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquired lock "refresh_cache-1ed132c9-9efe-4a40-b4da-308a7b23bb42" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.445303] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1538.521867] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1538.526854] env[61570]: DEBUG nova.compute.manager [req-8713351d-8da5-479d-9e06-a44d0d8c0714 req-c59af9bd-b91b-41b9-9d0d-94973b49a2d9 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Received event network-vif-plugged-524ba31d-8e5f-420b-ae82-e5ccf5d5cad3 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1538.527107] env[61570]: DEBUG oslo_concurrency.lockutils [req-8713351d-8da5-479d-9e06-a44d0d8c0714 req-c59af9bd-b91b-41b9-9d0d-94973b49a2d9 service nova] Acquiring lock "431ffe34-71c4-4b44-a83c-59895fef3fc7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.527393] env[61570]: DEBUG oslo_concurrency.lockutils [req-8713351d-8da5-479d-9e06-a44d0d8c0714 req-c59af9bd-b91b-41b9-9d0d-94973b49a2d9 service nova] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.527598] env[61570]: DEBUG oslo_concurrency.lockutils [req-8713351d-8da5-479d-9e06-a44d0d8c0714 req-c59af9bd-b91b-41b9-9d0d-94973b49a2d9 service nova] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.527784] env[61570]: DEBUG nova.compute.manager [req-8713351d-8da5-479d-9e06-a44d0d8c0714 req-c59af9bd-b91b-41b9-9d0d-94973b49a2d9 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] No waiting events found dispatching network-vif-plugged-524ba31d-8e5f-420b-ae82-e5ccf5d5cad3 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1538.527992] env[61570]: WARNING nova.compute.manager [req-8713351d-8da5-479d-9e06-a44d0d8c0714 req-c59af9bd-b91b-41b9-9d0d-94973b49a2d9 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Received unexpected event network-vif-plugged-524ba31d-8e5f-420b-ae82-e5ccf5d5cad3 for instance with vm_state building and task_state spawning. 
[ 1538.684861] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Successfully updated port: 524ba31d-8e5f-420b-ae82-e5ccf5d5cad3 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1538.699426] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "refresh_cache-431ffe34-71c4-4b44-a83c-59895fef3fc7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.699631] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquired lock "refresh_cache-431ffe34-71c4-4b44-a83c-59895fef3fc7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.699813] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1538.744194] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1538.962482] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Updating instance_info_cache with network_info: [{"id": "323b704b-5155-465f-a722-ef091352393f", "address": "fa:16:3e:87:66:fd", "network": {"id": "50f2d4a8-03a8-48c3-8d63-367ea34bbf8e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-497213239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "977a17d2733049fa8200053e72fc086c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "043ca97b-0fca-4b54-8be6-027123fa76d0", "external-id": "nsx-vlan-transportzone-702", "segmentation_id": 702, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap323b704b-51", "ovs_interfaceid": "323b704b-5155-465f-a722-ef091352393f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.975702] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae 
tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Releasing lock "refresh_cache-1ed132c9-9efe-4a40-b4da-308a7b23bb42" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.976034] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Instance network_info: |[{"id": "323b704b-5155-465f-a722-ef091352393f", "address": "fa:16:3e:87:66:fd", "network": {"id": "50f2d4a8-03a8-48c3-8d63-367ea34bbf8e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-497213239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "977a17d2733049fa8200053e72fc086c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "043ca97b-0fca-4b54-8be6-027123fa76d0", "external-id": "nsx-vlan-transportzone-702", "segmentation_id": 702, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap323b704b-51", "ovs_interfaceid": "323b704b-5155-465f-a722-ef091352393f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1538.976449] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:66:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '043ca97b-0fca-4b54-8be6-027123fa76d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '323b704b-5155-465f-a722-ef091352393f', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1538.984183] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Creating folder: Project (977a17d2733049fa8200053e72fc086c). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1538.984776] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c76b5e6f-b74a-4a80-bf0f-4a0974ea3daa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.996442] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Created folder: Project (977a17d2733049fa8200053e72fc086c) in parent group-v953072. [ 1538.996687] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Creating folder: Instances. Parent ref: group-v953159. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1538.996887] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e84bb66b-1e3b-48f6-9eff-8fe6933323a6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.005893] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Created folder: Instances in parent group-v953159. [ 1539.006180] env[61570]: DEBUG oslo.service.loopingcall [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.006374] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1539.010023] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8932c5a4-68cf-4d1f-b9b8-35e1f08b7f64 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.021674] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Updating instance_info_cache with network_info: [{"id": "524ba31d-8e5f-420b-ae82-e5ccf5d5cad3", "address": "fa:16:3e:2f:f2:df", "network": {"id": "ef346165-abed-445e-b3a0-856cb3d1e2a6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1288746113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1325c2eb2c3a40e18a473bd0c4cb7bad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap524ba31d-8e", "ovs_interfaceid": "524ba31d-8e5f-420b-ae82-e5ccf5d5cad3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.028905] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1539.028905] env[61570]: value = "task-4891404" [ 1539.028905] env[61570]: _type = "Task" [ 1539.028905] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.034426] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Releasing lock "refresh_cache-431ffe34-71c4-4b44-a83c-59895fef3fc7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.034755] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Instance network_info: |[{"id": "524ba31d-8e5f-420b-ae82-e5ccf5d5cad3", "address": "fa:16:3e:2f:f2:df", "network": {"id": "ef346165-abed-445e-b3a0-856cb3d1e2a6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1288746113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1325c2eb2c3a40e18a473bd0c4cb7bad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap524ba31d-8e", "ovs_interfaceid": "524ba31d-8e5f-420b-ae82-e5ccf5d5cad3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1539.038236] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:f2:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2bf99f85-3a5c-47c6-a603-e215be6ab0bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '524ba31d-8e5f-420b-ae82-e5ccf5d5cad3', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1539.045800] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Creating folder: Project (1325c2eb2c3a40e18a473bd0c4cb7bad). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1539.046547] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891404, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.046981] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-882b6ef2-8ca7-4465-b403-0a5bcf1313f7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.059614] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Created folder: Project (1325c2eb2c3a40e18a473bd0c4cb7bad) in parent group-v953072. [ 1539.059833] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Creating folder: Instances. Parent ref: group-v953162. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1539.060105] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a4b68df-90f6-4d07-b6f0-744bdd88269d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.071011] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Created folder: Instances in parent group-v953162. [ 1539.071463] env[61570]: DEBUG oslo.service.loopingcall [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.071463] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1539.071672] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76c2c743-8f49-47e9-bd37-f8e5c5c2f9bc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.094221] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1539.094221] env[61570]: value = "task-4891407" [ 1539.094221] env[61570]: _type = "Task" [ 1539.094221] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.102959] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891407, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.541808] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891404, 'name': CreateVM_Task, 'duration_secs': 0.322131} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.542164] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1539.542655] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.542823] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.543166] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1539.543418] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25821184-517d-4945-8086-62ef020fb4f4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.548037] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Waiting for the task: (returnval){ [ 1539.548037] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f0c449-0b05-1e57-8a2a-7ecc8bd66ae4" [ 1539.548037] env[61570]: _type = "Task" [ 1539.548037] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.556313] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f0c449-0b05-1e57-8a2a-7ecc8bd66ae4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.603794] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891407, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.058677] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.058942] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1540.059176] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.104742] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891407, 'name': CreateVM_Task} progress is 99%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.261832] env[61570]: DEBUG nova.compute.manager [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Received event network-changed-323b704b-5155-465f-a722-ef091352393f {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1540.262142] env[61570]: DEBUG nova.compute.manager [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Refreshing instance network info cache due to event network-changed-323b704b-5155-465f-a722-ef091352393f. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1540.262424] env[61570]: DEBUG oslo_concurrency.lockutils [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] Acquiring lock "refresh_cache-1ed132c9-9efe-4a40-b4da-308a7b23bb42" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.262667] env[61570]: DEBUG oslo_concurrency.lockutils [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] Acquired lock "refresh_cache-1ed132c9-9efe-4a40-b4da-308a7b23bb42" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.262901] env[61570]: DEBUG nova.network.neutron [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Refreshing network info cache for port 323b704b-5155-465f-a722-ef091352393f {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1540.549283] env[61570]: DEBUG nova.network.neutron [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Updated VIF entry in instance network info cache for port 323b704b-5155-465f-a722-ef091352393f. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1540.549708] env[61570]: DEBUG nova.network.neutron [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Updating instance_info_cache with network_info: [{"id": "323b704b-5155-465f-a722-ef091352393f", "address": "fa:16:3e:87:66:fd", "network": {"id": "50f2d4a8-03a8-48c3-8d63-367ea34bbf8e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-497213239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "977a17d2733049fa8200053e72fc086c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "043ca97b-0fca-4b54-8be6-027123fa76d0", "external-id": "nsx-vlan-transportzone-702", "segmentation_id": 702, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap323b704b-51", "ovs_interfaceid": "323b704b-5155-465f-a722-ef091352393f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.559644] env[61570]: DEBUG oslo_concurrency.lockutils [req-bc050a5e-e896-4ce7-83ed-646446a1faa9 req-8b5312f8-2672-4db5-873e-4f473033aefe service nova] Releasing lock "refresh_cache-1ed132c9-9efe-4a40-b4da-308a7b23bb42" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.605679] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891407, 'name': CreateVM_Task, 'duration_secs': 1.346862} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.605850] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1540.606507] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.606664] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.606978] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1540.607241] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a65e03e-86cf-4c85-817f-dfa1f316cf91 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.613158] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Waiting for the task: (returnval){ [ 1540.613158] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]520c964a-e681-cfaf-c376-5960e10c08be" [ 1540.613158] env[61570]: _type = "Task" [ 1540.613158] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.623841] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]520c964a-e681-cfaf-c376-5960e10c08be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.642345] env[61570]: DEBUG nova.compute.manager [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Received event network-changed-524ba31d-8e5f-420b-ae82-e5ccf5d5cad3 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1540.642345] env[61570]: DEBUG nova.compute.manager [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Refreshing instance network info cache due to event network-changed-524ba31d-8e5f-420b-ae82-e5ccf5d5cad3. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1540.642494] env[61570]: DEBUG oslo_concurrency.lockutils [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] Acquiring lock "refresh_cache-431ffe34-71c4-4b44-a83c-59895fef3fc7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.642653] env[61570]: DEBUG oslo_concurrency.lockutils [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] Acquired lock "refresh_cache-431ffe34-71c4-4b44-a83c-59895fef3fc7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.642757] env[61570]: DEBUG nova.network.neutron [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Refreshing network info cache for port 524ba31d-8e5f-420b-ae82-e5ccf5d5cad3 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1541.129693] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.133026] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1541.133026] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.197664] env[61570]: DEBUG nova.network.neutron [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Updated VIF entry in instance network info cache for port 524ba31d-8e5f-420b-ae82-e5ccf5d5cad3. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1541.198056] env[61570]: DEBUG nova.network.neutron [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Updating instance_info_cache with network_info: [{"id": "524ba31d-8e5f-420b-ae82-e5ccf5d5cad3", "address": "fa:16:3e:2f:f2:df", "network": {"id": "ef346165-abed-445e-b3a0-856cb3d1e2a6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1288746113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1325c2eb2c3a40e18a473bd0c4cb7bad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap524ba31d-8e", "ovs_interfaceid": "524ba31d-8e5f-420b-ae82-e5ccf5d5cad3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.214132] env[61570]: DEBUG oslo_concurrency.lockutils [req-8acb83e5-6312-4e03-b65e-c6554751cb80 req-51d58604-e96f-4072-b90b-076409686941 service nova] Releasing lock "refresh_cache-431ffe34-71c4-4b44-a83c-59895fef3fc7" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.233374] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.233374] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.350298] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.110136] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 
tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.927639] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.927986] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.430392] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ed21eab6-b7ef-495a-89d6-08cc736d3ac6 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "164797e5-5e37-4573-9f57-8f90a0508f99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.430759] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ed21eab6-b7ef-495a-89d6-08cc736d3ac6 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "164797e5-5e37-4573-9f57-8f90a0508f99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.967322] env[61570]: WARNING oslo_vmware.rw_handles [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles raise 
RemoteDisconnected("Remote end closed connection without" [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1583.967322] env[61570]: ERROR oslo_vmware.rw_handles [ 1583.967993] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1583.969665] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1583.969950] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Copying Virtual Disk [datastore2] vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/a08f7413-d2be-4431-842f-ec895e402813/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1583.970267] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbf17de8-7d3c-40e9-a1e3-916cb8aac0de {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.980597] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Waiting for the task: (returnval){ [ 1583.980597] env[61570]: value = "task-4891408" [ 1583.980597] env[61570]: _type = "Task" [ 1583.980597] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.989335] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Task: {'id': task-4891408, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.491220] env[61570]: DEBUG oslo_vmware.exceptions [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1584.491533] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.492156] env[61570]: ERROR nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1584.492156] env[61570]: Faults: ['InvalidArgument'] [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Traceback (most recent call last): [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] yield resources [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] self.driver.spawn(context, instance, image_meta, [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] self._fetch_image_if_missing(context, vi) [ 1584.492156] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] image_cache(vi, tmp_image_ds_loc) [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] vm_util.copy_virtual_disk( [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] session._wait_for_task(vmdk_copy_task) [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] return self.wait_for_task(task_ref) [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] return evt.wait() [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] result = hub.switch() [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1584.492542] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] return self.greenlet.switch() [ 1584.493118] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1584.493118] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] self.f(*self.args, **self.kw) [ 1584.493118] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1584.493118] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] raise exceptions.translate_fault(task_info.error) [ 1584.493118] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1584.493118] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Faults: ['InvalidArgument'] [ 1584.493118] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] [ 1584.493118] env[61570]: INFO nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Terminating instance [ 1584.494403] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.494403] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1584.494609] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-9f69f185-7dfd-4aed-8470-c92a510d25e7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.497283] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1584.497511] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1584.498238] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023ea6f1-0576-4e25-b74e-5ecefad0a651 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.505872] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1584.506098] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca4178e8-8a5e-4960-b8b2-1f13c67187bb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.508720] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1584.508894] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1584.509961] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-667a8925-d8f9-45b1-a1d4-bdfcbc06ea84 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.515835] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Waiting for the task: (returnval){ [ 1584.515835] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]529eb7e5-27b5-0446-8763-077d70ec128d" [ 1584.515835] env[61570]: _type = "Task" [ 1584.515835] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.533023] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1584.533023] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Creating directory with path [datastore2] vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1584.533023] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f738112-5cd3-4dba-8d48-63e6d5d5feb5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.558953] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Created directory with path [datastore2] vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1584.558953] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Fetch image to [datastore2] vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1584.558953] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1584.560270] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa0e70b-3c1d-464e-aeef-5b31bf843483 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.567031] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2695a773-0668-44d5-af53-8632f42516fa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.576893] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e5ef5e-9a7d-494d-a3c3-fafa1eb97c42 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.610557] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe182852-ceb4-427c-901c-0691ef3ed5cd 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.613257] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1584.613446] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1584.613619] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Deleting the datastore file [datastore2] f9d0b44c-a338-495e-8ed2-9c79813671fe {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1584.613854] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81dca2aa-53c4-4b72-9968-79adb3726627 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.619568] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2ff75819-5ebf-41f1-8201-c8e5843e22a0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.622582] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Waiting for the task: (returnval){ [ 1584.622582] env[61570]: value = "task-4891410" [ 1584.622582] env[61570]: _type = "Task" [ 1584.622582] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.630592] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Task: {'id': task-4891410, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.645613] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1584.711387] env[61570]: DEBUG oslo_vmware.rw_handles [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1584.771016] env[61570]: DEBUG oslo_vmware.rw_handles [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1584.771265] env[61570]: DEBUG oslo_vmware.rw_handles [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1585.132816] env[61570]: DEBUG oslo_vmware.api [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Task: {'id': task-4891410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078919} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.133102] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1585.133276] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1585.133452] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1585.133622] env[61570]: INFO nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1585.135838] env[61570]: DEBUG nova.compute.claims [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1585.136028] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.136254] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.421402] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28ffe1a-797a-4f05-a49d-c8f7fd5a2a12 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.430177] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b87c6db-4bfe-41a2-8c81-3576134a45ea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.460175] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76828fad-b9c3-4437-8ae6-21212e652c8f {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.468366] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1ab79a-a453-48cb-a12f-96e8e999619f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.483170] env[61570]: DEBUG nova.compute.provider_tree [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1585.491601] env[61570]: DEBUG nova.scheduler.client.report [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1585.509554] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.373s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.510170] env[61570]: ERROR nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1585.510170] env[61570]: Faults: ['InvalidArgument'] [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Traceback (most recent call last): [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] self.driver.spawn(context, instance, image_meta, [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] 
self._fetch_image_if_missing(context, vi) [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] image_cache(vi, tmp_image_ds_loc) [ 1585.510170] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] vm_util.copy_virtual_disk( [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] session._wait_for_task(vmdk_copy_task) [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] return self.wait_for_task(task_ref) [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] return evt.wait() [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] result = hub.switch() [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] return self.greenlet.switch() [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1585.510567] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] self.f(*self.args, **self.kw) [ 1585.510965] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1585.510965] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] raise exceptions.translate_fault(task_info.error) [ 1585.510965] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1585.510965] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Faults: ['InvalidArgument'] [ 1585.510965] env[61570]: ERROR nova.compute.manager [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] [ 1585.510965] env[61570]: DEBUG nova.compute.utils [None 
req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1585.512491] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Build of instance f9d0b44c-a338-495e-8ed2-9c79813671fe was re-scheduled: A specified parameter was not correct: fileType [ 1585.512491] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1585.512864] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1585.513051] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1585.513231] env[61570]: DEBUG nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1585.513392] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1585.950015] env[61570]: DEBUG nova.network.neutron [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.970771] env[61570]: INFO nova.compute.manager [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Took 0.46 seconds to deallocate network for instance. 
[ 1586.103388] env[61570]: INFO nova.scheduler.client.report [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Deleted allocations for instance f9d0b44c-a338-495e-8ed2-9c79813671fe [ 1586.142015] env[61570]: DEBUG oslo_concurrency.lockutils [None req-68e2c996-9291-4017-a054-372a55e6dfd6 tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.422s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.143726] env[61570]: DEBUG oslo_concurrency.lockutils [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.799s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.143726] env[61570]: DEBUG oslo_concurrency.lockutils [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Acquiring lock "f9d0b44c-a338-495e-8ed2-9c79813671fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.143726] env[61570]: DEBUG oslo_concurrency.lockutils [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.144072] env[61570]: DEBUG oslo_concurrency.lockutils [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.145855] env[61570]: INFO nova.compute.manager [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Terminating instance [ 1586.147744] env[61570]: DEBUG nova.compute.manager [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1586.147892] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1586.148384] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49015d53-c0df-4468-8710-d45187a5b5b2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.158527] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59349014-0c07-494e-9272-577edf6fe6bb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.170139] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1586.195345] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9d0b44c-a338-495e-8ed2-9c79813671fe could not be found. [ 1586.195345] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1586.195345] env[61570]: INFO nova.compute.manager [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1586.195521] env[61570]: DEBUG oslo.service.loopingcall [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1586.195745] env[61570]: DEBUG nova.compute.manager [-] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1586.195832] env[61570]: DEBUG nova.network.neutron [-] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1586.223749] env[61570]: DEBUG nova.network.neutron [-] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.227756] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.228066] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.229540] env[61570]: INFO nova.compute.claims [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1586.233947] env[61570]: INFO nova.compute.manager [-] [instance: f9d0b44c-a338-495e-8ed2-9c79813671fe] Took 0.04 seconds to deallocate network for instance. 
[ 1586.348125] env[61570]: DEBUG oslo_concurrency.lockutils [None req-32f1c3f7-50a7-433b-af88-177b94ec30ba tempest-ServersNegativeTestMultiTenantJSON-742428444 tempest-ServersNegativeTestMultiTenantJSON-742428444-project-member] Lock "f9d0b44c-a338-495e-8ed2-9c79813671fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.516934] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e773cec6-ca65-4d64-92e3-87f33ff3a207 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.524807] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7de3fd6-063c-496a-8232-3077f880e516 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.555567] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fec005-0c09-4379-a34b-2eef9c85fe04 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.563409] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d557921d-20c0-4466-8ecb-5b5db389e3a3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.577710] env[61570]: DEBUG nova.compute.provider_tree [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.589368] env[61570]: DEBUG nova.scheduler.client.report [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1586.606309] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.606817] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1586.686289] env[61570]: DEBUG nova.compute.utils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1586.687239] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1586.687362] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1586.700522] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1586.770920] env[61570]: DEBUG nova.policy [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f39d4ad55c0e49399dd3facf87c0a719', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35794e305c9f4380b941db6b873ec99c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1586.786172] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1586.818744] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1586.819010] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1586.819185] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1586.819371] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1586.819569] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1586.819725] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1586.819959] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1586.820147] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1586.820318] env[61570]: DEBUG nova.virt.hardware [None 
req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1586.820485] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1586.820656] env[61570]: DEBUG nova.virt.hardware [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1586.821628] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097d0e7b-10e8-4c87-aec3-72f713f72301 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.830602] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e1bb96-d64a-4313-85d3-0499d565ea0d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.983032] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "d7a13cff-f371-46d2-baea-b01a3731724a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.983334] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "d7a13cff-f371-46d2-baea-b01a3731724a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.129774] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Successfully created port: 75d9ac44-458b-489d-8b9f-10507806f09e {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1587.752884] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1587.753318] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1588.187624] env[61570]: DEBUG nova.compute.manager [req-dc5fbfa0-5c0d-454b-bfcf-f769ce8b4e69 req-53ff8869-313b-40e5-a3d0-54b3e651708d service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Received event network-vif-plugged-75d9ac44-458b-489d-8b9f-10507806f09e {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1588.187885] env[61570]: DEBUG oslo_concurrency.lockutils [req-dc5fbfa0-5c0d-454b-bfcf-f769ce8b4e69 req-53ff8869-313b-40e5-a3d0-54b3e651708d service nova] Acquiring lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.188122] env[61570]: DEBUG oslo_concurrency.lockutils [req-dc5fbfa0-5c0d-454b-bfcf-f769ce8b4e69 req-53ff8869-313b-40e5-a3d0-54b3e651708d service nova] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.188294] env[61570]: DEBUG oslo_concurrency.lockutils [req-dc5fbfa0-5c0d-454b-bfcf-f769ce8b4e69 req-53ff8869-313b-40e5-a3d0-54b3e651708d service nova] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.188464] env[61570]: DEBUG nova.compute.manager [req-dc5fbfa0-5c0d-454b-bfcf-f769ce8b4e69 req-53ff8869-313b-40e5-a3d0-54b3e651708d service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] No waiting events found dispatching network-vif-plugged-75d9ac44-458b-489d-8b9f-10507806f09e {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1588.188623] env[61570]: WARNING nova.compute.manager [req-dc5fbfa0-5c0d-454b-bfcf-f769ce8b4e69 req-53ff8869-313b-40e5-a3d0-54b3e651708d service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Received unexpected event network-vif-plugged-75d9ac44-458b-489d-8b9f-10507806f09e for instance with vm_state building and task_state spawning. 
[ 1588.243901] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Successfully updated port: 75d9ac44-458b-489d-8b9f-10507806f09e {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1588.254960] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "refresh_cache-efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.256116] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquired lock "refresh_cache-efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.256116] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1588.309798] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1588.417123] env[61570]: DEBUG oslo_concurrency.lockutils [None req-54a0d09e-58b4-48ec-b069-e032825402de tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "05d44e9d-c370-4d48-9f16-40191ece6f80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.417383] env[61570]: DEBUG oslo_concurrency.lockutils [None req-54a0d09e-58b4-48ec-b069-e032825402de tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "05d44e9d-c370-4d48-9f16-40191ece6f80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.533828] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Updating instance_info_cache with network_info: [{"id": "75d9ac44-458b-489d-8b9f-10507806f09e", "address": "fa:16:3e:a2:b4:cd", "network": {"id": "1fdd4d11-04f4-4973-9267-2e4fe6f236fe", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1041855653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35794e305c9f4380b941db6b873ec99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ef6889-a40c-40f5-a6e5-d8726606296a", "external-id": "nsx-vlan-transportzone-537", "segmentation_id": 537, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75d9ac44-45", "ovs_interfaceid": "75d9ac44-458b-489d-8b9f-10507806f09e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.545840] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Releasing lock "refresh_cache-efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.546142] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Instance network_info: |[{"id": "75d9ac44-458b-489d-8b9f-10507806f09e", "address": "fa:16:3e:a2:b4:cd", "network": {"id": "1fdd4d11-04f4-4973-9267-2e4fe6f236fe", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1041855653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35794e305c9f4380b941db6b873ec99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ef6889-a40c-40f5-a6e5-d8726606296a", "external-id": "nsx-vlan-transportzone-537", "segmentation_id": 537, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75d9ac44-45", "ovs_interfaceid": "75d9ac44-458b-489d-8b9f-10507806f09e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1588.546829] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:b4:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53ef6889-a40c-40f5-a6e5-d8726606296a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75d9ac44-458b-489d-8b9f-10507806f09e', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1588.554329] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Creating folder: Project (35794e305c9f4380b941db6b873ec99c). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1588.554844] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4441f7f6-236f-419f-aeb8-ccd4202de117 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.565849] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Created folder: Project (35794e305c9f4380b941db6b873ec99c) in parent group-v953072. [ 1588.566110] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Creating folder: Instances. Parent ref: group-v953165. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1588.566362] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c72280be-7ca7-4a92-bbfa-ebdce725f74a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.575774] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Created folder: Instances in parent group-v953165. 
[ 1588.576036] env[61570]: DEBUG oslo.service.loopingcall [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.576293] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1588.576511] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-952c5170-fec1-4af5-8b8d-699a8efa069a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.597393] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1588.597393] env[61570]: value = "task-4891413" [ 1588.597393] env[61570]: _type = "Task" [ 1588.597393] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.608370] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891413, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.753485] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1588.764989] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.765266] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.765398] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.765554] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1588.766695] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a304a4-6595-42e2-a040-0bd47c60e88d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.775710] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd44914-a0c1-4232-93eb-7d3e6b117306 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.790520] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7133d26c-d319-4796-aed6-c70e53cf6b8c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.797881] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4d2b73-11b9-40b6-914d-80f2bb8b495f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.828750] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180584MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1588.828907] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.829115] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.907814] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.907989] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance c69c8589-88e1-481e-87b8-55608322440c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.908136] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.908316] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.908465] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.908583] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.908730] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.908888] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.909014] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.909161] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1588.921833] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.934616] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.947306] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.960137] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 164797e5-5e37-4573-9f57-8f90a0508f99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.972174] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.983836] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 05d44e9d-c370-4d48-9f16-40191ece6f80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1588.984153] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1588.984355] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '70', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_9697eba07488413b9b05222af1a8e33d': '1', 'io_workload': '10', 'num_proj_1453684a18b64487b99eedf9f842fd60': '1', 'num_proj_010349794e9d4aaf8248bc8855c58453': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_97bb60f83e91408c89c70c926c236a71': '1', 'num_proj_977a17d2733049fa8200053e72fc086c': '1', 'num_task_spawning': '2', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1589.108608] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891413, 'name': CreateVM_Task, 'duration_secs': 0.389409} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.108792] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1589.109471] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.109631] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.109981] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1589.110273] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b91e7657-d94f-45e0-b650-a7fa78c05a34 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.114937] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 
tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for the task: (returnval){ [ 1589.114937] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5235cd46-0c23-b456-7a46-3739dbe8ac20" [ 1589.114937] env[61570]: _type = "Task" [ 1589.114937] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.125896] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5235cd46-0c23-b456-7a46-3739dbe8ac20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.190768] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d70a76-6168-4dff-869f-43d1dd87c375 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.198696] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f381d8-dfaf-482f-ada1-8c621c3bb427 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.233531] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31e557c-7eaf-4606-86b8-b3cdb947c170 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.242055] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3c3735-d797-4c49-987b-4419a8d6da2d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.255824] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.265171] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1589.280044] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1589.280288] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.451s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.625459] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.625722] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1589.625937] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.215515] env[61570]: DEBUG nova.compute.manager [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Received event network-changed-75d9ac44-458b-489d-8b9f-10507806f09e {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1590.215759] env[61570]: DEBUG nova.compute.manager [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Refreshing instance network info cache due to event network-changed-75d9ac44-458b-489d-8b9f-10507806f09e. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1590.215909] env[61570]: DEBUG oslo_concurrency.lockutils [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] Acquiring lock "refresh_cache-efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.216097] env[61570]: DEBUG oslo_concurrency.lockutils [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] Acquired lock "refresh_cache-efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.216229] env[61570]: DEBUG nova.network.neutron [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Refreshing network info cache for port 75d9ac44-458b-489d-8b9f-10507806f09e {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1590.280280] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.280527] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.591043] env[61570]: DEBUG nova.network.neutron [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Updated VIF entry in instance network info cache for port 75d9ac44-458b-489d-8b9f-10507806f09e. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1590.591043] env[61570]: DEBUG nova.network.neutron [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Updating instance_info_cache with network_info: [{"id": "75d9ac44-458b-489d-8b9f-10507806f09e", "address": "fa:16:3e:a2:b4:cd", "network": {"id": "1fdd4d11-04f4-4973-9267-2e4fe6f236fe", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1041855653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35794e305c9f4380b941db6b873ec99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ef6889-a40c-40f5-a6e5-d8726606296a", "external-id": "nsx-vlan-transportzone-537", "segmentation_id": 537, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75d9ac44-45", "ovs_interfaceid": "75d9ac44-458b-489d-8b9f-10507806f09e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.600720] env[61570]: DEBUG oslo_concurrency.lockutils [req-4b157f3e-5c23-48db-b05d-58e4a8932cc9 req-841b5b13-6073-4701-a0c4-f134e34bf816 service nova] Releasing lock "refresh_cache-efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.753147] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.753346] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1590.753495] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1590.776160] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.776326] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: c69c8589-88e1-481e-87b8-55608322440c] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.776454] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.776583] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.776705] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.776824] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.776942] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.777071] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.777196] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.777316] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1590.777436] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1590.778034] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.778276] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.753564] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.747790] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.747615] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.070589] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.128278] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.283564] env[61570]: WARNING oslo_vmware.rw_handles [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 
1634.283564] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1634.283564] env[61570]: ERROR oslo_vmware.rw_handles [ 1634.284376] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1634.286137] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1634.286436] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Copying Virtual Disk [datastore2] vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/83722d69-438f-4b12-bc62-aea0d28cf9e1/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1634.286769] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb036ca3-65fc-436e-874c-7817b7ead46d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.295052] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Waiting for the task: (returnval){ [ 1634.295052] env[61570]: value = "task-4891414" [ 1634.295052] env[61570]: _type = "Task" [ 1634.295052] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.303498] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Task: {'id': task-4891414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.806565] env[61570]: DEBUG oslo_vmware.exceptions [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1634.806565] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.806780] env[61570]: ERROR nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1634.806780] env[61570]: Faults: ['InvalidArgument'] [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] Traceback (most recent call last): [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] yield resources [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self.driver.spawn(context, instance, image_meta, [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self._fetch_image_if_missing(context, vi) [ 1634.806780] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] image_cache(vi, tmp_image_ds_loc) [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] vm_util.copy_virtual_disk( [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] session._wait_for_task(vmdk_copy_task) [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] return self.wait_for_task(task_ref) [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] return evt.wait() [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] result = hub.switch() [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1634.807179] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] return self.greenlet.switch() [ 1634.807624] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1634.807624] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self.f(*self.args, **self.kw) [ 1634.807624] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1634.807624] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] raise exceptions.translate_fault(task_info.error) [ 1634.807624] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1634.807624] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] Faults: ['InvalidArgument'] [ 1634.807624] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] [ 1634.807624] env[61570]: INFO nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Terminating instance [ 1634.808536] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.808743] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1634.808997] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d77f26d-74b8-444b-8d72-7b068c7c80ad 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.811319] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1634.811551] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1634.812297] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1f2d5b-6fe3-4c39-9e3d-f57cf3585826 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.819677] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1634.819917] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78c98cea-148d-476d-af3b-60179df43b39 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.822346] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1634.822550] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1634.823526] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a30410c-81f3-4f5d-a3b0-3627ef3c2ad5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.828659] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Waiting for the task: (returnval){ [ 1634.828659] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]522ed0dc-4efa-8535-9a8e-f5e5ae7d9c90" [ 1634.828659] env[61570]: _type = "Task" [ 1634.828659] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.836940] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]522ed0dc-4efa-8535-9a8e-f5e5ae7d9c90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.895546] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1634.895801] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1634.896087] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Deleting the datastore file [datastore2] c69c8589-88e1-481e-87b8-55608322440c {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1634.896393] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f79ebdc5-a1c8-49f6-9af0-244a3b6495bc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.903271] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Waiting for the task: (returnval){ [ 1634.903271] env[61570]: value = "task-4891416" [ 1634.903271] env[61570]: _type = "Task" [ 1634.903271] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.913276] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Task: {'id': task-4891416, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.338672] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1635.338939] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Creating directory with path [datastore2] vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1635.339199] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e89b75d2-2e25-4078-a6e5-5fccf4008304 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.351293] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Created directory with path [datastore2] vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1635.351450] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Fetch image to [datastore2] vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1635.351631] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1635.352401] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f653de-e6fa-40f1-bf3e-074498133f53 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.359387] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb828a5-39a7-46ca-a1a6-0ef1478069c9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.368902] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e3ea17-a1b7-44d3-8a57-c2a99864f8e6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.399327] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ef01af-6463-4780-8c11-3dd086ae7a49 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.408913] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c2859b59-35f1-4643-aef7-ebeb6535f995 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.415578] env[61570]: DEBUG oslo_vmware.api [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Task: {'id': task-4891416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072027} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.415814] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1635.415990] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1635.416173] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1635.416343] env[61570]: INFO nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1635.418557] env[61570]: DEBUG nova.compute.claims [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1635.418732] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.418945] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.432087] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1635.494448] env[61570]: DEBUG oslo_vmware.rw_handles [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1635.554386] env[61570]: DEBUG oslo_vmware.rw_handles [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1635.554596] env[61570]: DEBUG oslo_vmware.rw_handles [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1635.737197] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85986637-0a8e-43f5-845c-46a1b23124b5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.744841] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc8ff50-bea3-43a2-8219-f2f662575d45 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.775526] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cd29a0-c988-4c3c-80c3-1aefc42dbf50 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.783871] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4599836-2eba-416e-bce2-bf05e5751da3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.798044] env[61570]: DEBUG nova.compute.provider_tree [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.806059] env[61570]: DEBUG nova.scheduler.client.report [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1635.821829] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.403s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.822413] env[61570]: ERROR nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1635.822413] env[61570]: Faults: ['InvalidArgument'] [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] Traceback (most recent call last): [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1635.822413] env[61570]: ERROR 
nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self.driver.spawn(context, instance, image_meta, [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self._fetch_image_if_missing(context, vi) [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] image_cache(vi, tmp_image_ds_loc) [ 1635.822413] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] vm_util.copy_virtual_disk( [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] session._wait_for_task(vmdk_copy_task) [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] return self.wait_for_task(task_ref) [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] return evt.wait() [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] result = hub.switch() [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] return self.greenlet.switch() [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1635.822754] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] self.f(*self.args, **self.kw) [ 1635.823094] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1635.823094] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] raise exceptions.translate_fault(task_info.error) [ 1635.823094] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1635.823094] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] Faults: ['InvalidArgument'] [ 1635.823094] env[61570]: ERROR nova.compute.manager [instance: c69c8589-88e1-481e-87b8-55608322440c] [ 1635.823226] env[61570]: DEBUG nova.compute.utils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1635.824744] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Build of instance c69c8589-88e1-481e-87b8-55608322440c was re-scheduled: A specified parameter was not correct: fileType [ 1635.824744] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1635.825141] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1635.825374] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1635.825487] env[61570]: DEBUG nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1635.825649] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1636.181811] env[61570]: DEBUG nova.network.neutron [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.202275] env[61570]: INFO nova.compute.manager [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Took 0.38 seconds to deallocate network for instance. [ 1636.307785] env[61570]: INFO nova.scheduler.client.report [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Deleted allocations for instance c69c8589-88e1-481e-87b8-55608322440c [ 1636.330996] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67c743b8-eaec-432a-81eb-170bcc76e85f tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "c69c8589-88e1-481e-87b8-55608322440c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 475.219s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.332358] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "c69c8589-88e1-481e-87b8-55608322440c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 279.963s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.332587] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Acquiring lock "c69c8589-88e1-481e-87b8-55608322440c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.333350] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "c69c8589-88e1-481e-87b8-55608322440c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.333350] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "c69c8589-88e1-481e-87b8-55608322440c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.335216] env[61570]: INFO nova.compute.manager [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Terminating instance [ 1636.336956] env[61570]: DEBUG nova.compute.manager [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1636.337088] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1636.337859] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72a6dcd2-5b07-4e89-b3f7-82a0ed412aa4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.348518] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2207e08-caf6-4091-8f79-a3dc3865648a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.360196] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1636.385545] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c69c8589-88e1-481e-87b8-55608322440c could not be found. [ 1636.385737] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1636.385890] env[61570]: INFO nova.compute.manager [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] [instance: c69c8589-88e1-481e-87b8-55608322440c] Took 0.05 seconds to destroy the instance on the hypervisor. 
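Editor's note: the terminate path above hits an instance whose backing VM is already gone ("Instance does not exist on backend") and still reports "Instance destroyed". A simplified sketch of that pattern, where a missing VM is treated as an already-completed destroy rather than an error; the lookup helper and return convention are illustrative stand-ins, not Nova's own code.

    # Sketch: destroy tolerates a missing backing VM and logs instead of failing.
    def find_vm_by_uuid(instance_uuid):
        # Stand-in for the SearchIndex.FindAllByUuid lookup; returns None when
        # no VM with this UUID exists on the vCenter side.
        return None

    def destroy_instance(instance_uuid):
        vm_ref = find_vm_by_uuid(instance_uuid)
        if vm_ref is None:
            # Mirrors the WARNING above: nothing to tear down, treat as done.
            print(f"Instance {instance_uuid} could not be found; "
                  "nothing to destroy")
            return
        # Power off and unregister the VM here in a real driver.

    destroy_instance("c69c8589-88e1-481e-87b8-55608322440c")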
[ 1636.386151] env[61570]: DEBUG oslo.service.loopingcall [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1636.386418] env[61570]: DEBUG nova.compute.manager [-] [instance: c69c8589-88e1-481e-87b8-55608322440c] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1636.386507] env[61570]: DEBUG nova.network.neutron [-] [instance: c69c8589-88e1-481e-87b8-55608322440c] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1636.410510] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.410782] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.412657] env[61570]: INFO nova.compute.claims [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1636.417074] env[61570]: DEBUG nova.network.neutron [-] [instance: c69c8589-88e1-481e-87b8-55608322440c] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.425179] env[61570]: INFO nova.compute.manager [-] [instance: c69c8589-88e1-481e-87b8-55608322440c] Took 0.04 seconds to deallocate network for instance. 
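Editor's note: the claim made above is checked against the provider inventory dict that the report client logs repeatedly in this run (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 200). A rough sketch of the capacity arithmetic implied by that dict, usable = (total - reserved) * allocation_ratio; this simplification ignores min_unit/max_unit/step_size and is not the placement service's actual algorithm.

    # Sketch: does a request fit on top of what is already allocated?
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def fits(inventory, requested, used):
        for rc, amount in requested.items():
            inv = inventory[rc]
            capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
            if used.get(rc, 0) + amount > capacity:
                return False
        return True

    # The m1.nano flavor in this run asks for 1 VCPU / 128 MB / 1 GB; the
    # "used" figures below are illustrative.
    print(fits(INVENTORY,
               {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1},
               {'VCPU': 10, 'MEMORY_MB': 1280, 'DISK_GB': 10}))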
[ 1636.547219] env[61570]: DEBUG oslo_concurrency.lockutils [None req-31e8ecbd-66d4-44d2-b506-8b71b39d3e28 tempest-ServerAddressesTestJSON-47438040 tempest-ServerAddressesTestJSON-47438040-project-member] Lock "c69c8589-88e1-481e-87b8-55608322440c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.215s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.676863] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca40834-3405-4e0c-bf5c-b599522c727c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.687014] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d523e2-35a4-44ce-b7b1-6786d5c20a55 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.722572] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa46ba14-c313-4c48-b055-41a87c13de08 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.728237] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e363b9f-df75-4a46-ba14-7c39037d7f79 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.741993] env[61570]: DEBUG nova.compute.provider_tree [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.751879] env[61570]: DEBUG nova.scheduler.client.report [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1636.769728] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.359s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.770148] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1636.807308] env[61570]: DEBUG nova.compute.utils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1636.808580] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1636.808745] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1636.818312] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1636.882735] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1636.896780] env[61570]: DEBUG nova.policy [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab2f35e4a6b744db8470656aed0cc984', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34fecdc3cc7f47fdba241831e5f27f53', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1636.918482] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1636.918740] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1636.918893] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1636.919077] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1636.919222] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1636.919364] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1636.919757] env[61570]: 
DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1636.919953] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1636.920135] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1636.920293] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1636.920489] env[61570]: DEBUG nova.virt.hardware [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1636.921682] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4c7830-6172-461d-93af-ef04bb89bb53 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.930503] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb3d6d1-e36e-48a5-9024-9cfb6f8c4042 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.241381] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Successfully created port: 995996a0-5055-4f4e-9385-835e3ad3b2ed {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.984184] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Successfully updated port: 995996a0-5055-4f4e-9385-835e3ad3b2ed {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.000450] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "refresh_cache-1b211472-f426-4e7f-8f7a-70564c84e59b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.000929] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "refresh_cache-1b211472-f426-4e7f-8f7a-70564c84e59b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.000929] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1638.079015] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1638.232561] env[61570]: DEBUG nova.compute.manager [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Received event network-vif-plugged-995996a0-5055-4f4e-9385-835e3ad3b2ed {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1638.232777] env[61570]: DEBUG oslo_concurrency.lockutils [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] Acquiring lock "1b211472-f426-4e7f-8f7a-70564c84e59b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.233052] env[61570]: DEBUG oslo_concurrency.lockutils [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.233318] env[61570]: DEBUG oslo_concurrency.lockutils [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.233465] env[61570]: DEBUG nova.compute.manager [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] No waiting events found dispatching network-vif-plugged-995996a0-5055-4f4e-9385-835e3ad3b2ed {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1638.233665] env[61570]: WARNING nova.compute.manager [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Received unexpected event network-vif-plugged-995996a0-5055-4f4e-9385-835e3ad3b2ed for instance with vm_state building and task_state spawning. 
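Editor's note: the WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") shows the external-event dispatch behaviour: if no waiter registered for the event, it is logged and dropped rather than raised. A simplified sketch of that logic; the data structures and function are illustrative only.

    # Sketch: dispatch an external event, warning when nothing is waiting on it.
    waiting_events = {}   # instance_uuid -> set of event names being waited on

    def dispatch_external_event(instance_uuid, event_name, vm_state, task_state):
        waiting = waiting_events.get(instance_uuid, set())
        if event_name in waiting:
            waiting.discard(event_name)
            print(f"Dispatching {event_name} to waiter for {instance_uuid}")
        else:
            print(f"WARNING: unexpected event {event_name} for instance with "
                  f"vm_state {vm_state} and task_state {task_state}")

    dispatch_external_event(
        "1b211472-f426-4e7f-8f7a-70564c84e59b",
        "network-vif-plugged-995996a0-5055-4f4e-9385-835e3ad3b2ed",
        "building", "spawning")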
[ 1638.233830] env[61570]: DEBUG nova.compute.manager [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Received event network-changed-995996a0-5055-4f4e-9385-835e3ad3b2ed {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1638.233983] env[61570]: DEBUG nova.compute.manager [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Refreshing instance network info cache due to event network-changed-995996a0-5055-4f4e-9385-835e3ad3b2ed. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1638.234172] env[61570]: DEBUG oslo_concurrency.lockutils [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] Acquiring lock "refresh_cache-1b211472-f426-4e7f-8f7a-70564c84e59b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.288166] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Updating instance_info_cache with network_info: [{"id": "995996a0-5055-4f4e-9385-835e3ad3b2ed", "address": "fa:16:3e:dc:97:0e", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995996a0-50", "ovs_interfaceid": "995996a0-5055-4f4e-9385-835e3ad3b2ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.302882] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "refresh_cache-1b211472-f426-4e7f-8f7a-70564c84e59b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.303223] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Instance network_info: |[{"id": "995996a0-5055-4f4e-9385-835e3ad3b2ed", "address": "fa:16:3e:dc:97:0e", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995996a0-50", "ovs_interfaceid": "995996a0-5055-4f4e-9385-835e3ad3b2ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1638.304170] env[61570]: DEBUG oslo_concurrency.lockutils [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] Acquired lock "refresh_cache-1b211472-f426-4e7f-8f7a-70564c84e59b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.304170] env[61570]: DEBUG nova.network.neutron [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Refreshing network info cache for port 995996a0-5055-4f4e-9385-835e3ad3b2ed {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1638.305225] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:97:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '995996a0-5055-4f4e-9385-835e3ad3b2ed', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1638.313271] env[61570]: DEBUG oslo.service.loopingcall [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.317243] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1638.317831] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9608ead1-25fa-4f18-886c-e5bffea5f569 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.338826] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1638.338826] env[61570]: value = "task-4891417" [ 1638.338826] env[61570]: _type = "Task" [ 1638.338826] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.347353] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891417, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.693756] env[61570]: DEBUG nova.network.neutron [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Updated VIF entry in instance network info cache for port 995996a0-5055-4f4e-9385-835e3ad3b2ed. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1638.694305] env[61570]: DEBUG nova.network.neutron [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Updating instance_info_cache with network_info: [{"id": "995996a0-5055-4f4e-9385-835e3ad3b2ed", "address": "fa:16:3e:dc:97:0e", "network": {"id": "64098a3f-b071-475d-9f9c-47d463aa1eb0", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-929405124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34fecdc3cc7f47fdba241831e5f27f53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995996a0-50", "ovs_interfaceid": "995996a0-5055-4f4e-9385-835e3ad3b2ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.708549] env[61570]: DEBUG oslo_concurrency.lockutils [req-95f3e266-feaa-4bcb-8f5f-328d4bb22419 req-571fedb3-fdcc-4bbb-896a-258c2ba2c7bc service nova] Releasing lock "refresh_cache-1b211472-f426-4e7f-8f7a-70564c84e59b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.849321] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891417, 'name': CreateVM_Task, 'duration_secs': 0.385237} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.849510] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1638.850178] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.850348] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.850763] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1638.850975] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c58a71d-e2a9-4e45-a36d-764d16cf7e0d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.855818] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 1638.855818] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52db618b-52ec-1d7f-db49-bb940cc14c9f" [ 1638.855818] env[61570]: _type = "Task" [ 1638.855818] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.865342] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52db618b-52ec-1d7f-db49-bb940cc14c9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.367451] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.367837] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1639.367992] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.753585] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.753948] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1649.753513] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.753789] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.753305] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.753559] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.753799] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.764971] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.765217] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.765485] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.765730] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1650.767086] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4a6171-9d86-4e8a-a642-1d0ef674405a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.777085] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d42884-f7f0-479c-818e-8c5babddbb22 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.791761] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38aa9281-6c54-492f-830d-a3c33e521f6d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.799051] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a512312e-8730-4a31-b2da-776663e5d1cb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.828952] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180587MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1650.829088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.830028] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.906129] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance db38d263-aa3d-46b1-a13d-1469155fad84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906129] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906129] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906129] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906277] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906277] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906277] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906277] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906398] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.906398] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1650.917399] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1650.928556] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1650.939369] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 164797e5-5e37-4573-9f57-8f90a0508f99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1650.951054] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1650.965669] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 05d44e9d-c370-4d48-9f16-40191ece6f80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1650.965669] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1650.965903] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '71', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_9697eba07488413b9b05222af1a8e33d': '1', 'io_workload': '10', 'num_proj_010349794e9d4aaf8248bc8855c58453': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_97bb60f83e91408c89c70c926c236a71': '1', 'num_proj_977a17d2733049fa8200053e72fc086c': '1', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_task_spawning': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1651.162585] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3670bc0-388c-444d-a454-51ddf40d20a9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.171095] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ea33d5-daae-4bac-a141-34c27149e391 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.200993] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656db300-05e8-485d-b9c7-4438fc3952a5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.208607] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c667a8de-b03d-400e-a8d8-b00f6acd33fe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1651.221980] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.230799] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.248770] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1651.248994] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.420s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.249572] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.249892] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1652.249892] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1652.271867] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.272233] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.272394] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.272525] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.272651] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.272773] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.272898] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.273026] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.273146] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.273262] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1652.273385] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1652.944508] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "788bc317-0136-42c9-b8f6-7d1a68df3109" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.944750] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.974781] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "f46d0506-529b-47d0-ad10-c152bb9005f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.975035] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "f46d0506-529b-47d0-ad10-c152bb9005f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.752759] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1657.749086] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1680.705692] env[61570]: WARNING oslo_vmware.rw_handles [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 
1680.705692] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1680.705692] env[61570]: ERROR oslo_vmware.rw_handles [ 1680.706363] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1680.708124] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1680.708426] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Copying Virtual Disk [datastore2] vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/88e5f720-623f-4025-a84b-aaea1c169080/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1680.708754] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71a17b9c-ea9b-4b93-ad2c-44828e6a46cb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.718698] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Waiting for the task: (returnval){ [ 1680.718698] env[61570]: value = "task-4891418" [ 1680.718698] env[61570]: _type = "Task" [ 1680.718698] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.727965] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Task: {'id': task-4891418, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.229077] env[61570]: DEBUG oslo_vmware.exceptions [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1681.229374] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.229919] env[61570]: ERROR nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1681.229919] env[61570]: Faults: ['InvalidArgument'] [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Traceback (most recent call last): [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] yield resources [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self.driver.spawn(context, instance, image_meta, [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self._fetch_image_if_missing(context, vi) [ 1681.229919] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] image_cache(vi, tmp_image_ds_loc) [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] vm_util.copy_virtual_disk( [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] session._wait_for_task(vmdk_copy_task) [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] return self.wait_for_task(task_ref) [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] return evt.wait() [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] result = hub.switch() [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1681.230419] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] return self.greenlet.switch() [ 1681.230770] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1681.230770] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self.f(*self.args, **self.kw) [ 1681.230770] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1681.230770] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] raise exceptions.translate_fault(task_info.error) [ 1681.230770] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1681.230770] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Faults: ['InvalidArgument'] [ 1681.230770] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] [ 1681.230770] env[61570]: INFO nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Terminating instance [ 1681.231959] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.232205] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1681.232449] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dff6406-2b04-49e0-b28a-e0c39e120473 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.235304] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1681.235500] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1681.236249] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6b2831-074d-46ae-98f7-3919b0d7bd85 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.243709] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1681.243980] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3358aff-4803-4421-a02a-f4d95b1c648e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.246270] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1681.246442] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1681.247384] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2095ff2-ce65-4a3a-9c0f-21d516701b4a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.252442] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Waiting for the task: (returnval){ [ 1681.252442] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52953907-c5f9-dd20-34d0-0ec9d755fefe" [ 1681.252442] env[61570]: _type = "Task" [ 1681.252442] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.264141] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52953907-c5f9-dd20-34d0-0ec9d755fefe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.321228] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1681.321441] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1681.321640] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Deleting the datastore file [datastore2] db38d263-aa3d-46b1-a13d-1469155fad84 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1681.321996] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76b6d82f-3cf9-4783-906e-ca7fcc84bcf4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.328672] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Waiting for the task: (returnval){ [ 1681.328672] env[61570]: value = "task-4891420" [ 1681.328672] env[61570]: _type = "Task" [ 1681.328672] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.336696] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Task: {'id': task-4891420, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.763160] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1681.763522] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Creating directory with path [datastore2] vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1681.763631] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc46e0b7-8f75-4393-b4d5-a25efecc4e86 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.776047] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Created directory with path [datastore2] vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1681.776047] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Fetch image to [datastore2] vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1681.776047] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1681.776384] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc7a2f2-800b-4f39-828e-a8f56b0f6de9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.783158] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2b79ca-f50f-47c3-a505-ad81128269e7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.792158] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a361996d-5c47-438f-b6d2-fcc58f943b8f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.824536] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc7044a-7134-4423-b813-855583ae3950 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.833885] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0350a73a-30d2-4055-914c-6856242cbd18 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.841053] env[61570]: DEBUG oslo_vmware.api [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Task: {'id': task-4891420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080787} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.841215] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1681.841366] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1681.841535] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1681.841703] env[61570]: INFO nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1681.844057] env[61570]: DEBUG nova.compute.claims [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1681.844057] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.844312] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.856661] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1681.913786] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1681.973450] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1681.973623] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1682.049393] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "1b211472-f426-4e7f-8f7a-70564c84e59b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.179923] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624d7769-4ae2-457a-ba63-b0f127c632f8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.188310] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4015bec-c0bf-4897-9fc1-88679cb03198 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.218307] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c328e4-476e-40d9-9698-bfd6ff5fd364 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.226165] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd21e21-5faa-4ba7-bd41-093534a3b4fb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.240685] env[61570]: DEBUG nova.compute.provider_tree [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1682.250593] env[61570]: DEBUG nova.scheduler.client.report [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1682.265667] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.421s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.266239] env[61570]: ERROR nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified 
parameter was not correct: fileType [ 1682.266239] env[61570]: Faults: ['InvalidArgument'] [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Traceback (most recent call last): [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self.driver.spawn(context, instance, image_meta, [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self._fetch_image_if_missing(context, vi) [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] image_cache(vi, tmp_image_ds_loc) [ 1682.266239] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] vm_util.copy_virtual_disk( [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] session._wait_for_task(vmdk_copy_task) [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] return self.wait_for_task(task_ref) [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] return evt.wait() [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] result = hub.switch() [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] return self.greenlet.switch() [ 1682.266807] env[61570]: 
ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1682.266807] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] self.f(*self.args, **self.kw) [ 1682.267391] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1682.267391] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] raise exceptions.translate_fault(task_info.error) [ 1682.267391] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1682.267391] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Faults: ['InvalidArgument'] [ 1682.267391] env[61570]: ERROR nova.compute.manager [instance: db38d263-aa3d-46b1-a13d-1469155fad84] [ 1682.267391] env[61570]: DEBUG nova.compute.utils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1682.269689] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Build of instance db38d263-aa3d-46b1-a13d-1469155fad84 was re-scheduled: A specified parameter was not correct: fileType [ 1682.269689] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1682.270095] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1682.270275] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1682.270456] env[61570]: DEBUG nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1682.270617] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1682.632756] env[61570]: DEBUG nova.network.neutron [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.646722] env[61570]: INFO nova.compute.manager [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Took 0.38 seconds to deallocate network for instance. [ 1682.749951] env[61570]: INFO nova.scheduler.client.report [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Deleted allocations for instance db38d263-aa3d-46b1-a13d-1469155fad84 [ 1682.768418] env[61570]: DEBUG oslo_concurrency.lockutils [None req-dc900b5d-ea7e-460f-966f-316850ab2346 tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "db38d263-aa3d-46b1-a13d-1469155fad84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 533.916s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.769578] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "db38d263-aa3d-46b1-a13d-1469155fad84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 338.401s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.769794] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Acquiring lock "db38d263-aa3d-46b1-a13d-1469155fad84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.769993] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "db38d263-aa3d-46b1-a13d-1469155fad84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.770176] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "db38d263-aa3d-46b1-a13d-1469155fad84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.773063] env[61570]: INFO nova.compute.manager [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Terminating instance [ 1682.774410] env[61570]: DEBUG nova.compute.manager [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1682.774600] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1682.775098] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e768106b-1b8c-47bd-8683-8870cacda248 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.784669] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab7dc8c-f8bb-4f45-8fe5-2cb50662299c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.795574] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1682.819144] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db38d263-aa3d-46b1-a13d-1469155fad84 could not be found. [ 1682.819375] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1682.819556] env[61570]: INFO nova.compute.manager [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1682.819832] env[61570]: DEBUG oslo.service.loopingcall [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1682.820088] env[61570]: DEBUG nova.compute.manager [-] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1682.820186] env[61570]: DEBUG nova.network.neutron [-] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1682.845572] env[61570]: DEBUG nova.network.neutron [-] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.846900] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.847148] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.848523] env[61570]: INFO nova.compute.claims [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1682.852635] env[61570]: INFO nova.compute.manager [-] [instance: db38d263-aa3d-46b1-a13d-1469155fad84] Took 0.03 seconds to deallocate network for instance. 
[ 1682.958772] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1c60ab1e-ab9a-4846-a214-9bb33e05864b tempest-ImagesNegativeTestJSON-399570359 tempest-ImagesNegativeTestJSON-399570359-project-member] Lock "db38d263-aa3d-46b1-a13d-1469155fad84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.189s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.110939] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aef7962-10b5-4302-ab91-9fcedbb809a6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.119791] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037ee470-72ab-4391-9a80-d5f0ef66a3a1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.150501] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0615433-2e12-40a1-85c1-c370dc842e30 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.158199] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a63d5b9-4725-4b46-8ed6-66ca36d6c923 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.171463] env[61570]: DEBUG nova.compute.provider_tree [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.181673] env[61570]: DEBUG nova.scheduler.client.report [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1683.197848] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.350s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.198311] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1683.235110] env[61570]: DEBUG nova.compute.utils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1683.236444] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Not allocating networking since 'none' was specified. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1683.246767] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1683.315023] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1683.342019] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1683.342019] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1683.342019] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1683.342250] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1683.342250] env[61570]: DEBUG 
nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1683.342250] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1683.342250] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1683.342250] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1683.342615] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1683.342927] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1683.343239] env[61570]: DEBUG nova.virt.hardware [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1683.344272] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52f8768-f45f-40cd-9072-d4f793064ed2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.352693] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8938cef-b752-4d42-aa48-f9d76aad8ac3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.368550] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance VIF info [] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1683.374684] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Creating folder: Project (9c61bebda38b4bd4b5d1d1a2068c49ba). 
Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1683.375326] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d318c264-f6d5-42e7-b00a-8fd7efa14f21 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.386074] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Created folder: Project (9c61bebda38b4bd4b5d1d1a2068c49ba) in parent group-v953072. [ 1683.386074] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Creating folder: Instances. Parent ref: group-v953169. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1683.386074] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-302a8d77-1fbf-44e4-afa5-60a76bdd0ff6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.394535] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Created folder: Instances in parent group-v953169. [ 1683.394762] env[61570]: DEBUG oslo.service.loopingcall [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.394951] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1683.395173] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca91ad90-0c43-410f-aace-68f69a75f4d3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.412717] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1683.412717] env[61570]: value = "task-4891423" [ 1683.412717] env[61570]: _type = "Task" [ 1683.412717] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.420442] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891423, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.923317] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891423, 'name': CreateVM_Task, 'duration_secs': 0.263768} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.923708] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1683.923913] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.924086] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.924424] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1683.924682] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a68ceac-70dc-421c-8a48-140d19abf39a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.929952] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Waiting for the task: (returnval){ [ 1683.929952] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52d5307f-d6a4-7579-8aaa-eb484a59efae" [ 1683.929952] env[61570]: _type = "Task" [ 1683.929952] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.940725] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52d5307f-d6a4-7579-8aaa-eb484a59efae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.440572] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.441325] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1684.441325] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.754509] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1707.754887] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1710.752632] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.753022] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.765610] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.765841] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.766021] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.766181] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1710.767278] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221d7cc6-69f7-41d1-9589-ed15594df21a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.776545] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805992be-e93b-4eaa-b528-2f779886b347 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.790872] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874c237f-6436-4904-9847-98a3c7e49ad2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.797904] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3decca6-f050-46ae-9ece-76ef308b0fb6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.828561] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180542MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1710.828827] env[61570]: DEBUG 
oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.828917] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.982225] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.982460] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.982595] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.982717] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.982834] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.982950] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.983082] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.983200] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.983310] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.983479] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1710.994883] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1711.005413] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 164797e5-5e37-4573-9f57-8f90a0508f99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1711.016457] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1711.027756] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 05d44e9d-c370-4d48-9f16-40191ece6f80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1711.071504] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1711.083898] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f46d0506-529b-47d0-ad10-c152bb9005f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1711.083898] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1711.084140] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '72', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_010349794e9d4aaf8248bc8855c58453': '1', 'io_workload': '10', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_97bb60f83e91408c89c70c926c236a71': '1', 'num_proj_977a17d2733049fa8200053e72fc086c': '1', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_task_spawning': '1', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1711.287294] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc269f4-5788-4bc7-baef-0253c633b55e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.295134] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ab41b8-a7c9-4245-b44f-95c1bdcace35 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.326352] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb38552-66a6-4368-95cb-9a75fc5859de {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.334138] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff7dcd5-931d-4c48-8765-0a708c6c6f3f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1711.347408] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1711.356197] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1711.373982] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1711.374170] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.545s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.374622] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.374951] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.753538] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.753737] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.753988] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1713.754053] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1713.778017] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.778399] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.778595] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.778746] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.778873] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.779068] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.779141] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.779266] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.779383] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.779503] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1713.779629] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1713.780312] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1713.780457] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 1713.821880] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] There are 0 instances to clean {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 1714.795219] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.749355] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.749811] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.165520] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_power_states {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.187820] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Getting list of instances from cluster (obj){ [ 1725.187820] env[61570]: value = "domain-c8" [ 1725.187820] env[61570]: _type = "ClusterComputeResource" [ 1725.187820] env[61570]: } {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1725.188098] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8ad59c-aff3-48c1-9cd0-d0d75b7826df {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.205550] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Got total of 10 instances {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1725.205731] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 5f5232f8-60f4-472f-ab6e-6273904481e8 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.205911] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid f3c3dfaa-bc34-4a73-acff-67fe5c8c490e {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.206083] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 
70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.206240] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 04741959-c2c4-4b38-92e7-43f941818775 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.206391] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid fe2fe70e-6a16-4b74-9766-583f8ca87dd3 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.206541] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 1ed132c9-9efe-4a40-b4da-308a7b23bb42 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.206689] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 431ffe34-71c4-4b44-a83c-59895fef3fc7 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.206833] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.206978] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 1b211472-f426-4e7f-8f7a-70564c84e59b {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.207141] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 1725.207479] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "5f5232f8-60f4-472f-ab6e-6273904481e8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.207849] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.208158] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.208391] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "04741959-c2c4-4b38-92e7-43f941818775" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.208598] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring 
lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.208795] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.208989] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.209199] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.209392] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "1b211472-f426-4e7f-8f7a-70564c84e59b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.209579] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.752767] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.913246] env[61570]: WARNING oslo_vmware.rw_handles [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1728.913246] 
env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1728.913246] env[61570]: ERROR oslo_vmware.rw_handles [ 1728.913915] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1728.915849] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1728.916136] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Copying Virtual Disk [datastore2] vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/c695f79b-649a-4d49-bed1-c063809197b1/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1728.916437] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65b286b5-215b-49c4-ab51-2b4276b4aa93 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.925202] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Waiting for the task: (returnval){ [ 1728.925202] env[61570]: value = "task-4891424" [ 1728.925202] env[61570]: _type = "Task" [ 1728.925202] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.935575] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Task: {'id': task-4891424, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.435538] env[61570]: DEBUG oslo_vmware.exceptions [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1729.435912] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.436494] env[61570]: ERROR nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1729.436494] env[61570]: Faults: ['InvalidArgument'] [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Traceback (most recent call last): [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] yield resources [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self.driver.spawn(context, instance, image_meta, [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self._fetch_image_if_missing(context, vi) [ 1729.436494] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] image_cache(vi, tmp_image_ds_loc) [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] vm_util.copy_virtual_disk( [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] session._wait_for_task(vmdk_copy_task) [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] return self.wait_for_task(task_ref) [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] return evt.wait() [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] result = hub.switch() [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1729.436846] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] return self.greenlet.switch() [ 1729.437259] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1729.437259] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self.f(*self.args, **self.kw) [ 1729.437259] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1729.437259] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] raise exceptions.translate_fault(task_info.error) [ 1729.437259] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1729.437259] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Faults: ['InvalidArgument'] [ 1729.437259] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] [ 1729.437259] env[61570]: INFO nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Terminating instance [ 1729.438465] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.438702] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1729.438957] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d0c44e5-3b8c-493f-b172-d8ce963e8bf6 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.441575] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1729.441788] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1729.442627] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9487b24-3796-48eb-9929-55a1d1e77345 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.450955] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1729.451331] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36f9e1de-980a-4418-9836-f018e6d4ae84 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.453792] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1729.453962] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1729.455075] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81f3b39e-0180-464a-bced-0655c0536bec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.460753] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 1729.460753] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52beefa3-63b0-f921-c960-245842979754" [ 1729.460753] env[61570]: _type = "Task" [ 1729.460753] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.469282] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52beefa3-63b0-f921-c960-245842979754, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.528552] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1729.528821] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1729.529017] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Deleting the datastore file [datastore2] 5f5232f8-60f4-472f-ab6e-6273904481e8 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1729.529303] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fa6aed6-9056-4f5f-92be-18d85191969f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.537337] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Waiting for the task: (returnval){ [ 1729.537337] env[61570]: value = "task-4891426" [ 1729.537337] env[61570]: _type = "Task" [ 1729.537337] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.546080] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Task: {'id': task-4891426, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.762149] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1729.762337] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances with incomplete migration {{(pid=61570) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 1729.972097] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1729.972542] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Creating directory with path [datastore2] vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1729.972597] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e321084c-07d4-4956-aa73-58ac3d5fd8f4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.985349] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Created directory with path [datastore2] vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1729.985623] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Fetch image to [datastore2] vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1729.985729] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1729.986601] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f64883f-b33c-4991-bdf1-a7912f97da4e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.994269] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-912f2b83-57cd-42bd-8119-91b9a611f8d1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.004224] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99576705-4482-4cdc-aaf0-479e9dc8f16b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.036923] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823f7a45-bdcd-4f4e-96db-3655dce138b4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.050072] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-22767f31-ee6a-44dc-87f5-083f8b09718f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.052233] env[61570]: DEBUG oslo_vmware.api [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Task: {'id': task-4891426, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080316} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.052537] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1730.052722] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1730.052892] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1730.053079] env[61570]: INFO nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Took 0.61 seconds to destroy the instance on the hypervisor. 
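
The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above follow the polling pattern visible throughout this log: submit a vCenter task, poll its progress ("progress is 0%"), and either return on completion ("completed successfully ... duration_secs") or raise the translated fault (here VimFaultException with Faults: ['InvalidArgument']). The following is only an illustrative sketch of that loop, not the oslo.vmware API; poll_vmware_task, fetch_task_info and TaskFailed are hypothetical names.

import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""

    def __init__(self, message, faults):
        super().__init__(message)
        self.faults = faults          # e.g. ['InvalidArgument']


def poll_vmware_task(fetch_task_info, interval=0.5, timeout=300):
    """Poll fetch_task_info() until the task succeeds or fails.

    fetch_task_info is a caller-supplied function returning a dict like
    {'state': 'running'|'success'|'error', 'progress': int,
     'message': str, 'faults': list} -- a stand-in for the real TaskInfo.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # Comparable to "raise exceptions.translate_fault(task_info.error)"
            # in the traceback above.
            raise TaskFailed(info.get('message', 'task error'),
                             info.get('faults', []))
        # Comparable to the "... progress is 0%." debug entries above.
        print("task progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)
    raise TimeoutError("task did not complete within %s seconds" % timeout)
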
[ 1730.055774] env[61570]: DEBUG nova.compute.claims [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1730.055968] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.056202] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.078149] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1730.141446] env[61570]: DEBUG oslo_vmware.rw_handles [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1730.217067] env[61570]: DEBUG oslo_vmware.rw_handles [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1730.217281] env[61570]: DEBUG oslo_vmware.rw_handles [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1730.390572] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95390f6-bcc5-43bc-9b12-bf914e2d4454 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.399319] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d38386a-fb5b-4563-b28a-0e0f05d8534a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.430018] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664150d1-9906-4964-8abd-2d664a06c93c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.438358] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4123e37e-1c4e-4529-91d8-ad0b2f557538 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.452189] env[61570]: DEBUG nova.compute.provider_tree [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1730.461443] env[61570]: DEBUG nova.scheduler.client.report [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1730.477962] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.422s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.478527] env[61570]: ERROR nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1730.478527] env[61570]: Faults: ['InvalidArgument'] [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Traceback (most recent call last): [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1730.478527] env[61570]: ERROR nova.compute.manager 
[instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self.driver.spawn(context, instance, image_meta, [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self._fetch_image_if_missing(context, vi) [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] image_cache(vi, tmp_image_ds_loc) [ 1730.478527] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] vm_util.copy_virtual_disk( [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] session._wait_for_task(vmdk_copy_task) [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] return self.wait_for_task(task_ref) [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] return evt.wait() [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] result = hub.switch() [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] return self.greenlet.switch() [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1730.478905] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] self.f(*self.args, **self.kw) [ 1730.479351] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1730.479351] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] raise exceptions.translate_fault(task_info.error) [ 1730.479351] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1730.479351] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Faults: ['InvalidArgument'] [ 1730.479351] env[61570]: ERROR nova.compute.manager [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] [ 1730.479508] env[61570]: DEBUG nova.compute.utils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1730.481171] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Build of instance 5f5232f8-60f4-472f-ab6e-6273904481e8 was re-scheduled: A specified parameter was not correct: fileType [ 1730.481171] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1730.481619] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1730.481868] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1730.482063] env[61570]: DEBUG nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1730.482228] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1730.878647] env[61570]: DEBUG nova.network.neutron [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.900781] env[61570]: INFO nova.compute.manager [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Took 0.42 seconds to deallocate network for instance. [ 1731.010773] env[61570]: INFO nova.scheduler.client.report [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Deleted allocations for instance 5f5232f8-60f4-472f-ab6e-6273904481e8 [ 1731.037445] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1ae350ad-f9fc-4fd4-bc17-52c9534edb30 tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 564.994s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.038679] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 368.838s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.038964] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Acquiring lock "5f5232f8-60f4-472f-ab6e-6273904481e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.039225] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.039421] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.041863] env[61570]: INFO nova.compute.manager [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Terminating instance [ 1731.043815] env[61570]: DEBUG nova.compute.manager [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1731.044062] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1731.044760] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3185983a-373d-48da-9127-2e2e742a53cc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.057187] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45eef8d-a859-4332-8a0d-7615fde8de22 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.068404] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1731.098341] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f5232f8-60f4-472f-ab6e-6273904481e8 could not be found. [ 1731.098560] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1731.098743] env[61570]: INFO nova.compute.manager [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Took 0.05 seconds to destroy the instance on the hypervisor. 
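
The oslo_concurrency.lockutils entries above ('Acquiring lock ... by ...', 'acquired ... waited 0.000s', '"released" ... held 0.422s') reflect a named-lock wrapper that times both the wait for the lock and the time it is held. A minimal sketch of that pattern, assuming one plain threading.Lock per name; timed_lock and _LOCKS are hypothetical names, not the lockutils implementation.

import threading
import time
from contextlib import contextmanager

_LOCKS = {}                      # one named lock per lock name
_LOCKS_GUARD = threading.Lock()


@contextmanager
def timed_lock(name, owner):
    """Acquire the named lock, logging wait and hold times like lockutils."""
    with _LOCKS_GUARD:
        lock = _LOCKS.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, owner))
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))

# Example use, mirroring the entries above:
# with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"):
#     ...critical section...
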
[ 1731.099010] env[61570]: DEBUG oslo.service.loopingcall [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1731.099341] env[61570]: DEBUG nova.compute.manager [-] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1731.099430] env[61570]: DEBUG nova.network.neutron [-] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1731.119840] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.120136] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.121750] env[61570]: INFO nova.compute.claims [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1731.136902] env[61570]: DEBUG nova.network.neutron [-] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1731.154564] env[61570]: INFO nova.compute.manager [-] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] Took 0.05 seconds to deallocate network for instance. 
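
The inventory reported above for provider 829dc000-b508-440d-ae59-f7cfbca90113 is what claims like the one logged here are checked against; schedulable capacity per resource class is (total - reserved) * allocation_ratio. A small worked example using the figures from the log; effective_capacity is a hypothetical helper, not Nova or Placement code.

def effective_capacity(inventory):
    """Return schedulable capacity per resource class:
    (total - reserved) * allocation_ratio."""
    return {
        rc: (rec['total'] - rec['reserved']) * rec['allocation_ratio']
        for rc, rec in inventory.items()
    }


# Figures copied from the inventory entries above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}
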
[ 1731.253057] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8da7a9d4-7817-40c2-bd3e-c94fef0d81ea tempest-ServerGroupTestJSON-1007615863 tempest-ServerGroupTestJSON-1007615863-project-member] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.214s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.254168] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.047s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.254385] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 5f5232f8-60f4-472f-ab6e-6273904481e8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1731.254567] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "5f5232f8-60f4-472f-ab6e-6273904481e8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.398911] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4270b438-d1b6-421d-9916-5496d8258476 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.407311] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c224888a-508a-4381-9e3f-d268a782d300 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.440777] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1316f7d5-26d1-4b03-8ca4-08760f097296 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.448814] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d175c647-3b01-43f7-9419-a7415b4d8499 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.463014] env[61570]: DEBUG nova.compute.provider_tree [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1731.471739] env[61570]: DEBUG nova.scheduler.client.report [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1731.487154] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.367s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.487713] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1731.545286] env[61570]: DEBUG nova.compute.utils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1731.547010] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1731.547010] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1731.557083] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1731.627983] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1731.653958] env[61570]: DEBUG nova.policy [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eddabcdffd3a4a3a82f8edf69114ff3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0a2f1cca5f94645bcee541eb75b23bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1731.659732] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1731.659997] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1731.660170] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1731.660349] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1731.660495] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1731.660638] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1731.660843] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1731.661008] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1731.661181] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1731.661491] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1731.661760] env[61570]: DEBUG nova.virt.hardware [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1731.662840] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68eafc92-675a-4f58-aec0-229de004ffc3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.672014] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e07fb1-4b80-493a-9288-9cdded255205 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.024611] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Successfully created port: e6f38b4b-7f77-4093-a349-e2e9d0e60b37 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1732.888709] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Successfully updated port: e6f38b4b-7f77-4093-a349-e2e9d0e60b37 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1732.903330] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "refresh_cache-f01923b4-11f1-412e-bc5f-070e0fbb8a6a" {{(pid=61570) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.903524] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquired lock "refresh_cache-f01923b4-11f1-412e-bc5f-070e0fbb8a6a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.903689] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1732.930095] env[61570]: DEBUG nova.compute.manager [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Received event network-vif-plugged-e6f38b4b-7f77-4093-a349-e2e9d0e60b37 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1732.930320] env[61570]: DEBUG oslo_concurrency.lockutils [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] Acquiring lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.930534] env[61570]: DEBUG oslo_concurrency.lockutils [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] Lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.930703] env[61570]: DEBUG oslo_concurrency.lockutils [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] Lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.930865] env[61570]: DEBUG nova.compute.manager [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] No waiting events found dispatching network-vif-plugged-e6f38b4b-7f77-4093-a349-e2e9d0e60b37 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1732.931045] env[61570]: WARNING nova.compute.manager [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Received unexpected event network-vif-plugged-e6f38b4b-7f77-4093-a349-e2e9d0e60b37 for instance with vm_state building and task_state spawning. 
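
The external-event entries above (network-vif-plugged / network-changed, "No waiting events found dispatching", "Received unexpected event ... vm_state building") follow a waiter/dispatch pattern: an incoming event either wakes a registered waiter or is reported as unexpected. A toy sketch of that pattern, assuming threading.Event waiters keyed by event name; EventWaiters is a hypothetical name, not Nova's InstanceEvents.

import threading


class EventWaiters:
    """Threads register a waiter for a named event; an external
    notification either wakes the waiter or is reported as unexpected
    (compare the WARNING entry above)."""

    def __init__(self):
        self._waiters = {}
        self._lock = threading.Lock()

    def prepare(self, name):
        """Register interest in an event before starting the operation."""
        event = threading.Event()
        with self._lock:
            self._waiters[name] = event
        return event

    def dispatch(self, name):
        """Deliver an external event; wake the waiter if one is registered."""
        with self._lock:
            event = self._waiters.pop(name, None)
        if event is None:
            print("WARNING: received unexpected event %s" % name)
        else:
            event.set()


waiters = EventWaiters()
waiters.dispatch("network-vif-plugged-e6f38b4b")   # no waiter -> warning
ev = waiters.prepare("network-vif-plugged-e6f38b4b")
waiters.dispatch("network-vif-plugged-e6f38b4b")   # waiter woken
print(ev.is_set())                                 # True
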
[ 1732.931207] env[61570]: DEBUG nova.compute.manager [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Received event network-changed-e6f38b4b-7f77-4093-a349-e2e9d0e60b37 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1732.931397] env[61570]: DEBUG nova.compute.manager [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Refreshing instance network info cache due to event network-changed-e6f38b4b-7f77-4093-a349-e2e9d0e60b37. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1732.931570] env[61570]: DEBUG oslo_concurrency.lockutils [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] Acquiring lock "refresh_cache-f01923b4-11f1-412e-bc5f-070e0fbb8a6a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.182430] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1733.423262] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Updating instance_info_cache with network_info: [{"id": "e6f38b4b-7f77-4093-a349-e2e9d0e60b37", "address": "fa:16:3e:00:af:9d", "network": {"id": "a3d1e32f-ebf9-44f9-84eb-969866afc6ec", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-695839231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0a2f1cca5f94645bcee541eb75b23bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6f38b4b-7f", "ovs_interfaceid": "e6f38b4b-7f77-4093-a349-e2e9d0e60b37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.437891] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Releasing lock "refresh_cache-f01923b4-11f1-412e-bc5f-070e0fbb8a6a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.438232] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 
tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Instance network_info: |[{"id": "e6f38b4b-7f77-4093-a349-e2e9d0e60b37", "address": "fa:16:3e:00:af:9d", "network": {"id": "a3d1e32f-ebf9-44f9-84eb-969866afc6ec", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-695839231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0a2f1cca5f94645bcee541eb75b23bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6f38b4b-7f", "ovs_interfaceid": "e6f38b4b-7f77-4093-a349-e2e9d0e60b37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1733.438534] env[61570]: DEBUG oslo_concurrency.lockutils [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] Acquired lock "refresh_cache-f01923b4-11f1-412e-bc5f-070e0fbb8a6a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.438714] env[61570]: DEBUG nova.network.neutron [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Refreshing network info cache for port e6f38b4b-7f77-4093-a349-e2e9d0e60b37 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1733.439865] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:af:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6f38b4b-7f77-4093-a349-e2e9d0e60b37', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1733.453563] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Creating folder: Project (b0a2f1cca5f94645bcee541eb75b23bc). Parent ref: group-v953072. 
{{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1733.455421] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35e37d09-97bf-4202-84e7-78b939a7af50 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.471221] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Created folder: Project (b0a2f1cca5f94645bcee541eb75b23bc) in parent group-v953072. [ 1733.471454] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Creating folder: Instances. Parent ref: group-v953172. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1733.471703] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23066841-0596-43b0-af89-65026de0ac8c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.481505] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Created folder: Instances in parent group-v953172. [ 1733.481786] env[61570]: DEBUG oslo.service.loopingcall [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1733.481975] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1733.482422] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa85dfa4-9209-4a7c-8edc-490c1cfa85e1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.506099] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1733.506099] env[61570]: value = "task-4891429" [ 1733.506099] env[61570]: _type = "Task" [ 1733.506099] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.514716] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891429, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.730534] env[61570]: DEBUG nova.network.neutron [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Updated VIF entry in instance network info cache for port e6f38b4b-7f77-4093-a349-e2e9d0e60b37. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1733.730971] env[61570]: DEBUG nova.network.neutron [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Updating instance_info_cache with network_info: [{"id": "e6f38b4b-7f77-4093-a349-e2e9d0e60b37", "address": "fa:16:3e:00:af:9d", "network": {"id": "a3d1e32f-ebf9-44f9-84eb-969866afc6ec", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-695839231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0a2f1cca5f94645bcee541eb75b23bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6f38b4b-7f", "ovs_interfaceid": "e6f38b4b-7f77-4093-a349-e2e9d0e60b37", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.741694] env[61570]: DEBUG oslo_concurrency.lockutils [req-d5678068-8bb9-4802-ace8-349227cf188d req-3043138f-ef03-4576-88e7-c0e31c3e212f service nova] Releasing lock "refresh_cache-f01923b4-11f1-412e-bc5f-070e0fbb8a6a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.017065] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891429, 'name': CreateVM_Task, 'duration_secs': 0.321594} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.017065] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1734.017319] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.017319] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.017620] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1734.017879] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44bf1ed8-9272-4f51-9f96-5b0655100827 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.023140] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Waiting for the task: (returnval){ [ 1734.023140] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]524aed9d-67ea-58af-a26b-3ef0f249885b" [ 1734.023140] env[61570]: _type = "Task" [ 1734.023140] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.032348] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]524aed9d-67ea-58af-a26b-3ef0f249885b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.534021] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.534388] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1734.534498] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.715622] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.307831] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "15cc451d-9419-4952-83a4-4fde3d237f8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.308305] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.644311] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquiring lock "8133bec0-155c-4ffe-b972-adabe3b281dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.644630] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Lock "8133bec0-155c-4ffe-b972-adabe3b281dc" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.617681] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.764948] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1769.764948] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1770.753257] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1770.753439] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1770.766270] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.766609] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.766609] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.766780] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1770.767860] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d953a967-c7db-469c-9498-996ec3ffed3c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.777078] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764544d6-e833-4efc-8c35-0d27be89c07a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.792270] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a429a368-f0db-40fd-8697-bbd8544e1a7a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.799243] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09a7931-9523-4a88-80c7-bd37a90bb1e6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.830467] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180592MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1770.830636] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.830829] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.911045] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.911220] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.911418] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.911576] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.911779] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.912326] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.912326] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.912326] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.912326] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.912545] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1770.928243] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1770.939200] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 05d44e9d-c370-4d48-9f16-40191ece6f80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1770.950174] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1770.961263] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f46d0506-529b-47d0-ad10-c152bb9005f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1770.971599] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1770.981777] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1770.982016] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1770.982184] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '73', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'io_workload': '10', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_97bb60f83e91408c89c70c926c236a71': '1', 'num_proj_977a17d2733049fa8200053e72fc086c': '1', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1770.999224] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing inventories for resource provider 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1771.014115] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating ProviderTree inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1771.014331] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1771.025974] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing aggregate associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, aggregates: None {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1771.055470] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing trait associations for resource provider 
829dc000-b508-440d-ae59-f7cfbca90113, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1771.255750] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcce8f4-d808-48d6-a0ea-49c5e0b3132d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.263981] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe439424-d897-45af-abec-25262e889c11 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.293951] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6dbd83-a1af-472f-92eb-6825552b66ee {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.301797] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a176209-0174-4107-91fe-eaed27596b04 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.316271] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1771.325272] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1771.342833] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1771.343050] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.512s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.343227] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.343627] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.753207] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.753468] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1773.753584] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1773.786165] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.786414] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.786625] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.786826] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.787037] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.787238] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.787431] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.787617] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.787804] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.787989] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1773.788194] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1773.788755] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.753576] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.750287] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.889183] env[61570]: WARNING oslo_vmware.rw_handles [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1779.889183] env[61570]: ERROR oslo_vmware.rw_handles [ 1779.889657] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 
tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1779.891537] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1779.891820] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Copying Virtual Disk [datastore2] vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/7cdd166c-e798-4b29-ada2-e5a5aecf1d33/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1779.892135] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4421d9d1-50ee-408b-9558-9feca2f9b461 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.900590] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 1779.900590] env[61570]: value = "task-4891430" [ 1779.900590] env[61570]: _type = "Task" [ 1779.900590] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.909677] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': task-4891430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.411113] env[61570]: DEBUG oslo_vmware.exceptions [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1780.411410] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.412062] env[61570]: ERROR nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1780.412062] env[61570]: Faults: ['InvalidArgument'] [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Traceback (most recent call last): [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] yield resources [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self.driver.spawn(context, instance, image_meta, [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self._fetch_image_if_missing(context, vi) [ 1780.412062] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] image_cache(vi, tmp_image_ds_loc) [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] vm_util.copy_virtual_disk( [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] session._wait_for_task(vmdk_copy_task) [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] return self.wait_for_task(task_ref) [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] return evt.wait() [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] result = hub.switch() [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1780.412551] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] return self.greenlet.switch() [ 1780.413135] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1780.413135] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self.f(*self.args, **self.kw) [ 1780.413135] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1780.413135] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] raise exceptions.translate_fault(task_info.error) [ 1780.413135] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1780.413135] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Faults: ['InvalidArgument'] [ 1780.413135] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] [ 1780.413135] env[61570]: INFO nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Terminating instance [ 1780.413978] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.414230] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1780.414818] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ecd1bcb-4f57-476c-9c69-65b849963409 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.417044] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1780.417243] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1780.418012] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9733e729-e26e-4735-9ad9-e47e67f626ca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.425306] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1780.425575] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-883426f4-6269-440f-a4f1-b4265473e66b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.428073] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1780.428254] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1780.429297] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6bc4068-233e-49f6-94c2-288123438043 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.435386] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 1780.435386] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c0a7a4-7494-51da-d32c-ea2eaf3ceb0f" [ 1780.435386] env[61570]: _type = "Task" [ 1780.435386] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.443021] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c0a7a4-7494-51da-d32c-ea2eaf3ceb0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.565344] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1780.565567] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1780.565764] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Deleting the datastore file [datastore2] f3c3dfaa-bc34-4a73-acff-67fe5c8c490e {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1780.566058] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c323c95c-82ff-4784-a172-0011070725a3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.572853] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 1780.572853] env[61570]: value = "task-4891432" [ 1780.572853] env[61570]: _type = "Task" [ 1780.572853] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.581723] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': task-4891432, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.947467] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1780.947744] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating directory with path [datastore2] vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1780.947998] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f760ffe6-3243-4cf9-9f86-906dcd54b94d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.960830] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Created directory with path [datastore2] vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1780.961039] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Fetch image to [datastore2] vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1780.961221] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1780.962087] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6328923-be11-4b91-91e6-20002bf5970e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.969969] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79bfd8f-3eb0-469a-9d70-2d06a3294938 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.980713] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc8d70e-e696-4496-a97c-8bd0d2be59fe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.013963] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ccfa1a-4e57-4d87-b8f7-1b3b252d0988 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.020797] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8516ce22-f758-480c-af0d-4797bce6bf4b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.046809] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1781.086686] env[61570]: DEBUG oslo_vmware.api [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': task-4891432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075082} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.086945] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1781.087183] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1781.087362] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1781.087533] env[61570]: INFO nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Took 0.67 seconds to destroy the instance on the hypervisor. 
[ 1781.089778] env[61570]: DEBUG nova.compute.claims [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1781.089952] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.090270] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.109414] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1781.172123] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1781.172264] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1781.393342] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91c152c-dc8f-4e57-a915-89a574fbe9f9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.401312] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f6263d-523c-47ef-9aa9-7a2dedd8dc29 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.431476] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fecc29-15f0-41cd-92e5-88d141752209 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.439481] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bf00e0-4579-49dd-a622-33035cde23e6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.455070] env[61570]: DEBUG nova.compute.provider_tree [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1781.464766] env[61570]: DEBUG nova.scheduler.client.report [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1781.479546] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.389s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.480091] env[61570]: ERROR nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1781.480091] env[61570]: Faults: ['InvalidArgument'] [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Traceback (most recent call last): [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1781.480091] env[61570]: ERROR 
nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self.driver.spawn(context, instance, image_meta, [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self._fetch_image_if_missing(context, vi) [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] image_cache(vi, tmp_image_ds_loc) [ 1781.480091] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] vm_util.copy_virtual_disk( [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] session._wait_for_task(vmdk_copy_task) [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] return self.wait_for_task(task_ref) [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] return evt.wait() [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] result = hub.switch() [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] return self.greenlet.switch() [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1781.480538] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] self.f(*self.args, **self.kw) [ 1781.481012] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1781.481012] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] raise exceptions.translate_fault(task_info.error) [ 1781.481012] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1781.481012] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Faults: ['InvalidArgument'] [ 1781.481012] env[61570]: ERROR nova.compute.manager [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] [ 1781.481012] env[61570]: DEBUG nova.compute.utils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1781.482832] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Build of instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e was re-scheduled: A specified parameter was not correct: fileType [ 1781.482832] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1781.483247] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1781.483419] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1781.483587] env[61570]: DEBUG nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1781.483745] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1781.811926] env[61570]: DEBUG nova.network.neutron [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.825331] env[61570]: INFO nova.compute.manager [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Took 0.34 seconds to deallocate network for instance. [ 1781.962313] env[61570]: INFO nova.scheduler.client.report [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Deleted allocations for instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e [ 1781.989162] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7727088-772e-4fe3-aac0-95eaea651a32 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 588.790s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.990453] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 393.266s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.990671] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.990877] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.991054] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.993176] env[61570]: INFO nova.compute.manager [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Terminating instance [ 1781.995618] env[61570]: DEBUG nova.compute.manager [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1781.995951] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1781.996168] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-753ebf1e-aee4-4629-a607-6fd06ee96826 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.006988] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44eb18ae-2d66-40b7-a642-8171b550b522 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.018872] env[61570]: DEBUG nova.compute.manager [None req-ed21eab6-b7ef-495a-89d6-08cc736d3ac6 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: 164797e5-5e37-4573-9f57-8f90a0508f99] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1782.042209] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f3c3dfaa-bc34-4a73-acff-67fe5c8c490e could not be found. [ 1782.042536] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1782.042788] env[61570]: INFO nova.compute.manager [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1782.043063] env[61570]: DEBUG oslo.service.loopingcall [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.043316] env[61570]: DEBUG nova.compute.manager [-] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1782.043412] env[61570]: DEBUG nova.network.neutron [-] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1782.046170] env[61570]: DEBUG nova.compute.manager [None req-ed21eab6-b7ef-495a-89d6-08cc736d3ac6 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: 164797e5-5e37-4573-9f57-8f90a0508f99] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1782.069535] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ed21eab6-b7ef-495a-89d6-08cc736d3ac6 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "164797e5-5e37-4573-9f57-8f90a0508f99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.639s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.074478] env[61570]: DEBUG nova.network.neutron [-] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.083023] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1782.085604] env[61570]: INFO nova.compute.manager [-] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] Took 0.04 seconds to deallocate network for instance. 
[ 1782.136419] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.136519] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.138093] env[61570]: INFO nova.compute.claims [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1782.184644] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a3f0ae6f-6a4f-4ee8-b97a-3c183faf3a62 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.185675] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 56.977s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.185675] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f3c3dfaa-bc34-4a73-acff-67fe5c8c490e] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1782.185675] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "f3c3dfaa-bc34-4a73-acff-67fe5c8c490e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.410862] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa610fd-cfbb-4a9f-b0fa-2fb5a82ae9db {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.428550] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f10b4f9-686f-493a-98c9-06567f32a103 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.459899] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a506419-4fe5-4674-a98b-fbab926eddcf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.468491] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549418b0-a846-4cab-a698-a8c699e6da8e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.483299] env[61570]: DEBUG nova.compute.provider_tree [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1782.491959] env[61570]: DEBUG nova.scheduler.client.report [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1782.511124] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.374s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.511627] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1782.549148] env[61570]: DEBUG nova.compute.utils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1782.550141] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1782.550326] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1782.562028] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1782.625600] env[61570]: DEBUG nova.policy [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffd73f0e33ee403c9cf442a4b6177e94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0374f79fe6d4946a64c2acc369178cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1782.638691] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1782.677913] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1782.678356] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1782.678470] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1782.678714] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1782.678900] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1782.679098] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1782.679355] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1782.679550] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1782.679800] env[61570]: DEBUG nova.virt.hardware [None 
req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1782.679988] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1782.680207] env[61570]: DEBUG nova.virt.hardware [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1782.681153] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4035de-4bff-4572-9a43-56c3cc473034 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.691862] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ffe2f5-9d24-44c8-bf0f-4f95111c62b7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.723714] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "d7a13cff-f371-46d2-baea-b01a3731724a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.102921] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Successfully created port: 78e3c5df-990f-4405-bd55-ae0dcc19ced7 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1783.860167] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Successfully updated port: 78e3c5df-990f-4405-bd55-ae0dcc19ced7 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1783.866772] env[61570]: DEBUG nova.compute.manager [req-f5ffddab-eae5-4cd0-9320-292e98309efe req-c3a510a6-df53-41d3-980a-9f093d28d868 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Received event network-vif-plugged-78e3c5df-990f-4405-bd55-ae0dcc19ced7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1783.867075] env[61570]: DEBUG oslo_concurrency.lockutils [req-f5ffddab-eae5-4cd0-9320-292e98309efe req-c3a510a6-df53-41d3-980a-9f093d28d868 service nova] Acquiring lock "d7a13cff-f371-46d2-baea-b01a3731724a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.867333] env[61570]: DEBUG oslo_concurrency.lockutils 
[req-f5ffddab-eae5-4cd0-9320-292e98309efe req-c3a510a6-df53-41d3-980a-9f093d28d868 service nova] Lock "d7a13cff-f371-46d2-baea-b01a3731724a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.867574] env[61570]: DEBUG oslo_concurrency.lockutils [req-f5ffddab-eae5-4cd0-9320-292e98309efe req-c3a510a6-df53-41d3-980a-9f093d28d868 service nova] Lock "d7a13cff-f371-46d2-baea-b01a3731724a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.867969] env[61570]: DEBUG nova.compute.manager [req-f5ffddab-eae5-4cd0-9320-292e98309efe req-c3a510a6-df53-41d3-980a-9f093d28d868 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] No waiting events found dispatching network-vif-plugged-78e3c5df-990f-4405-bd55-ae0dcc19ced7 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1783.868058] env[61570]: WARNING nova.compute.manager [req-f5ffddab-eae5-4cd0-9320-292e98309efe req-c3a510a6-df53-41d3-980a-9f093d28d868 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Received unexpected event network-vif-plugged-78e3c5df-990f-4405-bd55-ae0dcc19ced7 for instance with vm_state building and task_state deleting. [ 1783.875223] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.875378] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.875526] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1783.924274] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1784.386999] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Updating instance_info_cache with network_info: [{"id": "78e3c5df-990f-4405-bd55-ae0dcc19ced7", "address": "fa:16:3e:10:1b:db", "network": {"id": "5416d54d-e8a3-4aa1-8443-b47ba7b911ae", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1066830962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0374f79fe6d4946a64c2acc369178cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e3c5df-99", "ovs_interfaceid": "78e3c5df-990f-4405-bd55-ae0dcc19ced7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.401501] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Releasing lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.401745] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance network_info: |[{"id": "78e3c5df-990f-4405-bd55-ae0dcc19ced7", "address": "fa:16:3e:10:1b:db", "network": {"id": "5416d54d-e8a3-4aa1-8443-b47ba7b911ae", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1066830962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0374f79fe6d4946a64c2acc369178cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e3c5df-99", "ovs_interfaceid": "78e3c5df-990f-4405-bd55-ae0dcc19ced7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1784.402224] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:1b:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '510d3c47-3615-43d5-aa5d-a279fd915e71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78e3c5df-990f-4405-bd55-ae0dcc19ced7', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1784.409584] env[61570]: DEBUG oslo.service.loopingcall [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1784.410143] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1784.410737] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-328aaa94-ac2a-4833-92df-6e8decedcd12 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.432339] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1784.432339] env[61570]: value = "task-4891433" [ 1784.432339] env[61570]: _type = "Task" [ 1784.432339] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.441176] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891433, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.882556] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e9a4e813-fcb6-481b-bca0-93552f2439c8 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "265ab6dd-d584-4575-9e4c-e8ad7a4442f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.882814] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e9a4e813-fcb6-481b-bca0-93552f2439c8 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "265ab6dd-d584-4575-9e4c-e8ad7a4442f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.944080] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891433, 'name': CreateVM_Task, 'duration_secs': 0.343838} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.944300] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1784.944951] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.945135] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.945464] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1784.945720] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d629fff6-568d-418d-9980-959535e6fa33 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.951035] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 1784.951035] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52e322cc-8cb8-412a-2460-8f77b1f0cdae" [ 1784.951035] env[61570]: _type = "Task" [ 1784.951035] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.959505] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52e322cc-8cb8-412a-2460-8f77b1f0cdae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.461711] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.462140] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1785.462202] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.895013] env[61570]: DEBUG nova.compute.manager [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Received event network-changed-78e3c5df-990f-4405-bd55-ae0dcc19ced7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1785.895293] env[61570]: DEBUG nova.compute.manager [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Refreshing instance network info cache due to event network-changed-78e3c5df-990f-4405-bd55-ae0dcc19ced7. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1785.895523] env[61570]: DEBUG oslo_concurrency.lockutils [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] Acquiring lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.895701] env[61570]: DEBUG oslo_concurrency.lockutils [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] Acquired lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.895837] env[61570]: DEBUG nova.network.neutron [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Refreshing network info cache for port 78e3c5df-990f-4405-bd55-ae0dcc19ced7 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1786.220339] env[61570]: DEBUG nova.network.neutron [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Updated VIF entry in instance network info cache for port 78e3c5df-990f-4405-bd55-ae0dcc19ced7. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1786.220791] env[61570]: DEBUG nova.network.neutron [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Updating instance_info_cache with network_info: [{"id": "78e3c5df-990f-4405-bd55-ae0dcc19ced7", "address": "fa:16:3e:10:1b:db", "network": {"id": "5416d54d-e8a3-4aa1-8443-b47ba7b911ae", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1066830962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0374f79fe6d4946a64c2acc369178cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "510d3c47-3615-43d5-aa5d-a279fd915e71", "external-id": "nsx-vlan-transportzone-436", "segmentation_id": 436, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78e3c5df-99", "ovs_interfaceid": "78e3c5df-990f-4405-bd55-ae0dcc19ced7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.232697] env[61570]: DEBUG oslo_concurrency.lockutils [req-c5668066-2c84-401f-9432-32a555d3bb2f req-3d04712f-484d-488e-8708-092fa059a3b3 service nova] Releasing lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.084382] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.084766] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.755369] env[61570]: WARNING oslo_vmware.rw_handles [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1830.755369] 
env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1830.755369] env[61570]: ERROR oslo_vmware.rw_handles [ 1830.756134] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1830.757884] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1830.758180] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Copying Virtual Disk [datastore2] vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/89d22a79-b42c-416c-852d-c218a057bcd0/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1830.758475] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f8c23c0-2f96-4c2e-b521-645d92204717 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.768227] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 1830.768227] env[61570]: value = "task-4891434" [ 1830.768227] env[61570]: _type = "Task" [ 1830.768227] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.776782] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': task-4891434, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.279249] env[61570]: DEBUG oslo_vmware.exceptions [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1831.279610] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.280200] env[61570]: ERROR nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1831.280200] env[61570]: Faults: ['InvalidArgument'] [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Traceback (most recent call last): [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] yield resources [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self.driver.spawn(context, instance, image_meta, [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self._fetch_image_if_missing(context, vi) [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1831.280200] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] image_cache(vi, tmp_image_ds_loc) [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] vm_util.copy_virtual_disk( [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] session._wait_for_task(vmdk_copy_task) [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] return self.wait_for_task(task_ref) [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] return evt.wait() [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] result = hub.switch() [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] return self.greenlet.switch() [ 1831.280692] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1831.281202] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self.f(*self.args, **self.kw) [ 1831.281202] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1831.281202] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] raise exceptions.translate_fault(task_info.error) [ 1831.281202] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1831.281202] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Faults: ['InvalidArgument'] [ 1831.281202] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] [ 1831.281202] env[61570]: INFO nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Terminating instance [ 1831.282119] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.282357] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1831.282606] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3bbe92f-e4f4-4ba9-9a71-73acb59d1839 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.285242] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1831.285445] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1831.286172] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2c82b0-1b29-4b46-9669-b2006f4a087f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.293713] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1831.293944] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-010606ca-ec9a-4c2e-bda6-465ae1fa797f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.296511] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1831.296687] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1831.297661] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-325a2ace-6d52-4eb1-894e-0ba03a3c010f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.302881] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 1831.302881] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5291ee32-959e-d2a0-25cc-1cef75a5459b" [ 1831.302881] env[61570]: _type = "Task" [ 1831.302881] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.319789] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5291ee32-959e-d2a0-25cc-1cef75a5459b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.376509] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1831.376789] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1831.376905] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Deleting the datastore file [datastore2] 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.377273] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81910303-56cd-41e4-a7a6-42e5eb3199b9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.383551] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 1831.383551] env[61570]: value = "task-4891436" [ 1831.383551] env[61570]: _type = "Task" [ 1831.383551] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.391315] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': task-4891436, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.753507] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1831.753726] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1831.813134] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1831.813506] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating directory with path [datastore2] vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1831.813646] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd886676-9d12-4029-95cd-a80c710a0885 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.825438] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Created directory with path [datastore2] vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1831.825629] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Fetch image to [datastore2] vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1831.825800] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1831.826551] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a67594-6def-403c-923b-cbe34d9b485d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.833463] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fe5c70-7010-45a6-959a-d0f8ccefdd85 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.843492] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c0f36d-e5ff-4fb7-9ff1-377df492c385 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.874544] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50120a3a-c059-4add-9532-30a7b8da9000 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.881194] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6360d8f3-5e21-4501-92c7-7019a60e0a24 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.892361] env[61570]: DEBUG oslo_vmware.api [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': task-4891436, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070459} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.892635] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1831.892827] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1831.893053] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1831.893249] env[61570]: INFO nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1831.895618] env[61570]: DEBUG nova.compute.claims [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1831.896245] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.896245] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.905100] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1831.959967] env[61570]: DEBUG oslo_vmware.rw_handles [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1832.019484] env[61570]: DEBUG oslo_vmware.rw_handles [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1832.019681] env[61570]: DEBUG oslo_vmware.rw_handles [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1832.206076] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78511749-9eb6-40a7-92d3-a9db668f23f0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.212350] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39be5b50-8583-4758-bbee-fed90c97f642 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.244097] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9abeaa-a254-4f6b-9b42-758def8fb9b4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.252295] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcba16a5-6e37-4c08-bb5b-f548ade97acd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.265789] env[61570]: DEBUG nova.compute.provider_tree [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1832.274468] env[61570]: DEBUG nova.scheduler.client.report [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1832.289505] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.393s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.290030] env[61570]: ERROR nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1832.290030] env[61570]: Faults: ['InvalidArgument'] [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Traceback (most recent call last): [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 
70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self.driver.spawn(context, instance, image_meta, [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self._fetch_image_if_missing(context, vi) [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] image_cache(vi, tmp_image_ds_loc) [ 1832.290030] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] vm_util.copy_virtual_disk( [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] session._wait_for_task(vmdk_copy_task) [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] return self.wait_for_task(task_ref) [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] return evt.wait() [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] result = hub.switch() [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] return self.greenlet.switch() [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1832.290688] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] self.f(*self.args, **self.kw) [ 1832.291193] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1832.291193] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] raise exceptions.translate_fault(task_info.error) [ 1832.291193] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1832.291193] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Faults: ['InvalidArgument'] [ 1832.291193] env[61570]: ERROR nova.compute.manager [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] [ 1832.291193] env[61570]: DEBUG nova.compute.utils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1832.292206] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Build of instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc was re-scheduled: A specified parameter was not correct: fileType [ 1832.292206] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1832.292592] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1832.292763] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1832.292932] env[61570]: DEBUG nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1832.293107] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1832.629620] env[61570]: DEBUG nova.network.neutron [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.641885] env[61570]: INFO nova.compute.manager [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Took 0.35 seconds to deallocate network for instance. [ 1832.752216] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.752528] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.763884] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.764087] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.764238] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.764414] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1832.765545] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-30b7da8c-10a2-4812-8360-91a1d84db94d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.770962] env[61570]: INFO nova.scheduler.client.report [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Deleted allocations for instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc [ 1832.781739] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1ae54b-698e-4a72-8926-0a0ddb402d8f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.797379] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063f2dfd-cb9e-4bc0-89e2-0853aa8ea295 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.800345] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1305345e-f261-4b94-a371-8f7cb5f54fcc tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 638.187s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.802286] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 442.295s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.802546] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.802892] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.803124] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.805072] env[61570]: INFO nova.compute.manager [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Terminating instance [ 1832.806864] env[61570]: DEBUG nova.compute.manager 
[None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1832.807075] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1832.808073] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-baa2d474-9854-4843-a61f-751a34d4b398 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.814978] env[61570]: DEBUG nova.compute.manager [None req-54a0d09e-58b4-48ec-b069-e032825402de tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 05d44e9d-c370-4d48-9f16-40191ece6f80] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1832.818689] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd5aff9-7076-4ba4-a17f-0f81c5ff0b29 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.828559] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c368793c-22db-43b8-8256-242b83212467 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.862507] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180593MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1832.862657] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.862853] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.865100] env[61570]: DEBUG nova.compute.manager [None req-54a0d09e-58b4-48ec-b069-e032825402de tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 05d44e9d-c370-4d48-9f16-40191ece6f80] Instance disappeared before build. 
{{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1832.888023] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc could not be found. [ 1832.888848] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1832.888848] env[61570]: INFO nova.compute.manager [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1832.888848] env[61570]: DEBUG oslo.service.loopingcall [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.889203] env[61570]: DEBUG nova.compute.manager [-] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1832.889355] env[61570]: DEBUG nova.network.neutron [-] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1832.898534] env[61570]: DEBUG oslo_concurrency.lockutils [None req-54a0d09e-58b4-48ec-b069-e032825402de tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "05d44e9d-c370-4d48-9f16-40191ece6f80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 244.481s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.907392] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1832.920863] env[61570]: DEBUG nova.network.neutron [-] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.935582] env[61570]: INFO nova.compute.manager [-] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] Took 0.05 seconds to deallocate network for instance. [ 1832.936734] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 04741959-c2c4-4b38-92e7-43f941818775 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937161] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937161] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937161] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937356] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937539] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937608] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937703] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.937915] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1832.949119] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1832.959088] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f46d0506-529b-47d0-ad10-c152bb9005f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1832.961000] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.973891] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1832.984589] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1832.995408] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 265ab6dd-d584-4575-9e4c-e8ad7a4442f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1833.005081] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1833.005314] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1833.005471] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] stats={'failed_builds': '77', 'num_instances': '9', 'num_vm_building': '9', 'num_task_deleting': '9', 'num_os_type_None': '9', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'io_workload': '9', 'num_proj_97bb60f83e91408c89c70c926c236a71': '1', 'num_proj_977a17d2733049fa8200053e72fc086c': '1', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1833.031213] env[61570]: DEBUG oslo_concurrency.lockutils [None req-8ced8363-8eff-42ad-9d37-f31e5fd2ebbe tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.032280] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 107.824s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.032280] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1833.032468] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "70cc1a28-e1b4-4f92-a4d5-3f51d59c14dc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.183922] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21b11b2-c436-4f6a-a058-673e54319883 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.192544] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ad6891-df37-4ba5-9c27-c863fe77a462 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.222044] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64269bd-bf1c-42f6-bd0c-c76274fe3362 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.229591] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c76b612-ec23-43af-98e6-d0ab8c9c3454 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.242591] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1833.251235] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1833.265355] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1833.265552] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.403s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.265849] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.305s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.267298] 
env[61570]: INFO nova.compute.claims [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1833.505719] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440be915-0ef8-47b0-bcef-b06118a9b500 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.513721] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3e5e04-8127-46c4-8455-2988c81d9650 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.543716] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c48709-ff41-411e-911f-5002d4639a50 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.551345] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b31fc2-8a95-4bf3-bd88-3d757a4bfd22 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.564786] env[61570]: DEBUG nova.compute.provider_tree [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1833.574870] env[61570]: DEBUG nova.scheduler.client.report [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1833.590868] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.325s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.591385] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1833.623772] env[61570]: DEBUG nova.compute.utils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1833.625245] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1833.625444] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1833.634584] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1833.721485] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1833.726301] env[61570]: DEBUG nova.policy [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ee9cedabee24030bd9583b9e6ff7f07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e239f92d55742a7bf8a5bbc33ca718b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1833.749032] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1833.749032] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1833.749032] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.749276] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1833.749357] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.749527] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1833.749688] env[61570]: DEBUG nova.virt.hardware [None 
req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1833.749882] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1833.750095] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1833.750295] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1833.750512] env[61570]: DEBUG nova.virt.hardware [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1833.751399] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff78e50-c7c6-42d9-b47b-3afe5f8d5afe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.761706] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07ae36a-609e-40b7-825b-ba99b26f7546 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.095892] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Successfully created port: 503f0b9a-19ca-4fbc-8760-c03aacadd064 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1834.270521] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.270753] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1834.270829] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1834.296894] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 
04741959-c2c4-4b38-92e7-43f941818775] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297084] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297220] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297347] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297469] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297587] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297702] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297819] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.297936] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.298061] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1834.298455] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1834.298713] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.298901] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.706897] env[61570]: DEBUG nova.compute.manager [req-4a8800cd-9f3a-49e4-8c3f-d25a67aacd16 req-941ece8c-f847-49ad-b043-f8961d98a059 service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Received event network-vif-plugged-503f0b9a-19ca-4fbc-8760-c03aacadd064 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1834.707158] env[61570]: DEBUG oslo_concurrency.lockutils [req-4a8800cd-9f3a-49e4-8c3f-d25a67aacd16 req-941ece8c-f847-49ad-b043-f8961d98a059 service nova] Acquiring lock "788bc317-0136-42c9-b8f6-7d1a68df3109-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.707440] env[61570]: DEBUG oslo_concurrency.lockutils [req-4a8800cd-9f3a-49e4-8c3f-d25a67aacd16 req-941ece8c-f847-49ad-b043-f8961d98a059 service nova] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.707884] env[61570]: DEBUG oslo_concurrency.lockutils [req-4a8800cd-9f3a-49e4-8c3f-d25a67aacd16 req-941ece8c-f847-49ad-b043-f8961d98a059 service nova] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.707957] env[61570]: DEBUG nova.compute.manager [req-4a8800cd-9f3a-49e4-8c3f-d25a67aacd16 req-941ece8c-f847-49ad-b043-f8961d98a059 service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] No waiting events found dispatching network-vif-plugged-503f0b9a-19ca-4fbc-8760-c03aacadd064 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1834.708246] env[61570]: WARNING nova.compute.manager [req-4a8800cd-9f3a-49e4-8c3f-d25a67aacd16 req-941ece8c-f847-49ad-b043-f8961d98a059 service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Received unexpected event network-vif-plugged-503f0b9a-19ca-4fbc-8760-c03aacadd064 for instance with vm_state building and task_state spawning. 
[ 1835.022709] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Successfully updated port: 503f0b9a-19ca-4fbc-8760-c03aacadd064 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1835.039324] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "refresh_cache-788bc317-0136-42c9-b8f6-7d1a68df3109" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.039324] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired lock "refresh_cache-788bc317-0136-42c9-b8f6-7d1a68df3109" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.039324] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1835.088680] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1835.414890] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Updating instance_info_cache with network_info: [{"id": "503f0b9a-19ca-4fbc-8760-c03aacadd064", "address": "fa:16:3e:3d:e5:6e", "network": {"id": "d6fc49c7-f0f7-4d44-b690-ac9b566a7279", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2044663182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e239f92d55742a7bf8a5bbc33ca718b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap503f0b9a-19", "ovs_interfaceid": "503f0b9a-19ca-4fbc-8760-c03aacadd064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.428547] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Releasing lock "refresh_cache-788bc317-0136-42c9-b8f6-7d1a68df3109" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.428874] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Instance network_info: |[{"id": "503f0b9a-19ca-4fbc-8760-c03aacadd064", "address": "fa:16:3e:3d:e5:6e", "network": {"id": "d6fc49c7-f0f7-4d44-b690-ac9b566a7279", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2044663182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e239f92d55742a7bf8a5bbc33ca718b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap503f0b9a-19", "ovs_interfaceid": "503f0b9a-19ca-4fbc-8760-c03aacadd064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 
1835.429324] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:e5:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9f208df-1fb5-4403-9796-7fd19e4bfb85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '503f0b9a-19ca-4fbc-8760-c03aacadd064', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1835.437488] env[61570]: DEBUG oslo.service.loopingcall [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.438165] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1835.438427] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddf68ad2-d432-48ed-b90f-0d7d1470824e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.460129] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1835.460129] env[61570]: value = "task-4891437" [ 1835.460129] env[61570]: _type = "Task" [ 1835.460129] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.468668] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891437, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.752635] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.970724] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891437, 'name': CreateVM_Task, 'duration_secs': 0.305234} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.972597] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1835.978188] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.978358] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.978673] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1835.978912] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bc7d341-7cba-4b05-8142-a7ff2a05412d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.983669] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 1835.983669] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52a786be-889c-5625-3d1a-e1a6387965c9" [ 1835.983669] env[61570]: _type = "Task" [ 1835.983669] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.991343] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52a786be-889c-5625-3d1a-e1a6387965c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.495169] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.495169] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1836.495601] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.753442] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.774571] env[61570]: DEBUG nova.compute.manager [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Received event network-changed-503f0b9a-19ca-4fbc-8760-c03aacadd064 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1836.774767] env[61570]: DEBUG nova.compute.manager [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Refreshing instance network info cache due to event network-changed-503f0b9a-19ca-4fbc-8760-c03aacadd064. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1836.774987] env[61570]: DEBUG oslo_concurrency.lockutils [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] Acquiring lock "refresh_cache-788bc317-0136-42c9-b8f6-7d1a68df3109" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.775131] env[61570]: DEBUG oslo_concurrency.lockutils [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] Acquired lock "refresh_cache-788bc317-0136-42c9-b8f6-7d1a68df3109" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.775300] env[61570]: DEBUG nova.network.neutron [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Refreshing network info cache for port 503f0b9a-19ca-4fbc-8760-c03aacadd064 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1837.101746] env[61570]: DEBUG nova.network.neutron [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Updated VIF entry in instance network info cache for port 503f0b9a-19ca-4fbc-8760-c03aacadd064. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1837.102147] env[61570]: DEBUG nova.network.neutron [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Updating instance_info_cache with network_info: [{"id": "503f0b9a-19ca-4fbc-8760-c03aacadd064", "address": "fa:16:3e:3d:e5:6e", "network": {"id": "d6fc49c7-f0f7-4d44-b690-ac9b566a7279", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2044663182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e239f92d55742a7bf8a5bbc33ca718b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9f208df-1fb5-4403-9796-7fd19e4bfb85", "external-id": "cl2-zone-400", "segmentation_id": 400, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap503f0b9a-19", "ovs_interfaceid": "503f0b9a-19ca-4fbc-8760-c03aacadd064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.112067] env[61570]: DEBUG oslo_concurrency.lockutils [req-7085a325-f77c-476b-83b8-f841d3b60934 req-e2b4a1ac-aa04-4d68-80bf-5d1170159f8f service nova] Releasing lock "refresh_cache-788bc317-0136-42c9-b8f6-7d1a68df3109" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.749073] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.772032] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.057862] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "788bc317-0136-42c9-b8f6-7d1a68df3109" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.928488] env[61570]: WARNING oslo_vmware.rw_handles [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1879.928488] env[61570]: ERROR oslo_vmware.rw_handles [ 1879.929389] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1879.931139] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1879.931385] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/2f2d743f-226a-400e-a37c-182dce367997/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1879.931671] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89c02561-2c78-4326-b45a-e154533ba1f1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.940125] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 1879.940125] env[61570]: value = "task-4891438" [ 1879.940125] env[61570]: _type = "Task" [ 1879.940125] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.951250] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': task-4891438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.450572] env[61570]: DEBUG oslo_vmware.exceptions [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1880.450877] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.451460] env[61570]: ERROR nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1880.451460] env[61570]: Faults: ['InvalidArgument'] [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] Traceback (most recent call last): [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] yield resources [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self.driver.spawn(context, instance, image_meta, [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self._fetch_image_if_missing(context, vi) [ 1880.451460] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] image_cache(vi, tmp_image_ds_loc) [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] vm_util.copy_virtual_disk( [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] session._wait_for_task(vmdk_copy_task) [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] return self.wait_for_task(task_ref) [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] return evt.wait() [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] result = hub.switch() [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1880.451901] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] return self.greenlet.switch() [ 1880.452416] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1880.452416] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self.f(*self.args, **self.kw) [ 1880.452416] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1880.452416] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] raise exceptions.translate_fault(task_info.error) [ 
1880.452416] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1880.452416] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] Faults: ['InvalidArgument'] [ 1880.452416] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] [ 1880.452416] env[61570]: INFO nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Terminating instance [ 1880.453462] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.453700] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1880.453921] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f2146bf-f5a2-4c2b-80b5-3035e8681eae {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.456437] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1880.456634] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1880.457387] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de71f36d-6041-4fbf-a603-7bccdfd02b77 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.464398] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1880.464637] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf06a919-84e5-499b-9b71-3d8039f14eeb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.466976] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1880.467180] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1880.468177] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e6b55b-261a-492b-95a7-4619ecd44e2c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.473614] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Waiting for the task: (returnval){ [ 1880.473614] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52897be9-efb5-455c-a3e9-16abd3d8c1e7" [ 1880.473614] env[61570]: _type = "Task" [ 1880.473614] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.481626] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52897be9-efb5-455c-a3e9-16abd3d8c1e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.535803] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1880.535803] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1880.535972] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Deleting the datastore file [datastore2] 04741959-c2c4-4b38-92e7-43f941818775 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1880.536260] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76b4ea31-b627-4545-badd-d413af5c03b6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.545735] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 1880.545735] env[61570]: value = "task-4891440" [ 1880.545735] env[61570]: _type = "Task" [ 1880.545735] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.555997] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': task-4891440, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.986851] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1880.986851] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Creating directory with path [datastore2] vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1880.986851] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc21f86f-207a-41d4-9c63-1e48d1f48875 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.999618] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Created directory with path [datastore2] vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1880.999851] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Fetch image to [datastore2] vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1881.000010] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1881.000825] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e434c3-63fb-4817-b436-c803285669f3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.011354] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce5a103-5c2c-4d40-8cb8-f9c3738ea742 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.021969] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bb0ac0-11bb-4214-9175-246b9818b36b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.062495] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55bcbd0-8f2d-4e92-bb2b-e93757b35b3d 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.075315] env[61570]: DEBUG oslo_vmware.api [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': task-4891440, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071012} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.075768] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1881.075953] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1881.076254] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1881.076347] env[61570]: INFO nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Took 0.62 seconds to destroy the instance on the hypervisor. 
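The delete sequence above follows the usual oslo.vmware pattern visible throughout this log: a vCenter task object is created (here task-4891440 from FileManager.DeleteDatastoreFile_Task), polled while "progress is 0%", and finally reported as completed with a duration_secs value. A minimal sketch of that polling loop, using a hypothetical get_task_info() helper rather than the real oslo_vmware.api.VMwareAPISession.wait_for_task() API:

    import time

    # Hypothetical stand-in for the vSphere SDK call behind the log lines above;
    # the real code path is oslo_vmware.api.VMwareAPISession.wait_for_task().
    def poll_task(get_task_info, task_ref, interval=0.5):
        """Poll a vCenter task until it reaches a terminal state."""
        while True:
            info = get_task_info(task_ref)   # TaskInfo-like dict: state/progress/error
            if info["state"] == "success":
                return info                  # corresponds to the 'duration_secs' success line
            if info["state"] == "error":
                # mirrors oslo_vmware translating task_info.error into a VimFaultException
                raise RuntimeError(info["error"])
            time.sleep(interval)             # the repeated 'progress is 0%.' lines come from here

The "A specified parameter was not correct: fileType / Faults: ['InvalidArgument']" traceback that follows is exactly this error branch surfacing: the CopyVirtualDisk_Task fails on the vCenter side and the poller raises it back into nova.compute.manager._build_and_run_instance.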
[ 1881.077946] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-71052472-9d71-403a-8915-b0f3e9f266ab {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.080320] env[61570]: DEBUG nova.compute.claims [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1881.080512] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.080820] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.109382] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1881.180230] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1881.252424] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1881.252625] env[61570]: DEBUG oslo_vmware.rw_handles [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1881.460672] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b9d57b-318d-4baf-abe2-b1a643dd52f3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.468955] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d265d19-53a9-4d4e-9538-506778307ea3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.501591] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64012dc-af96-41be-babd-c487d1f70144 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.509933] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83afbd96-c38c-4ce9-8c5d-076837fb3bf3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.527634] env[61570]: DEBUG nova.compute.provider_tree [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1881.540737] env[61570]: DEBUG nova.scheduler.client.report [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1881.561770] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.481s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.563534] env[61570]: ERROR nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1881.563534] env[61570]: Faults: ['InvalidArgument'] [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] Traceback (most recent call last): [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1881.563534] env[61570]: ERROR 
nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self.driver.spawn(context, instance, image_meta, [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self._fetch_image_if_missing(context, vi) [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] image_cache(vi, tmp_image_ds_loc) [ 1881.563534] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] vm_util.copy_virtual_disk( [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] session._wait_for_task(vmdk_copy_task) [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] return self.wait_for_task(task_ref) [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] return evt.wait() [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] result = hub.switch() [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] return self.greenlet.switch() [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1881.564119] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] self.f(*self.args, **self.kw) [ 1881.564638] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1881.564638] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] raise exceptions.translate_fault(task_info.error) [ 1881.564638] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1881.564638] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] Faults: ['InvalidArgument'] [ 1881.564638] env[61570]: ERROR nova.compute.manager [instance: 04741959-c2c4-4b38-92e7-43f941818775] [ 1881.564638] env[61570]: DEBUG nova.compute.utils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1881.565582] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Build of instance 04741959-c2c4-4b38-92e7-43f941818775 was re-scheduled: A specified parameter was not correct: fileType [ 1881.565582] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1881.565582] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1881.565790] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1881.565905] env[61570]: DEBUG nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1881.566098] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1881.976826] env[61570]: DEBUG nova.network.neutron [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.999733] env[61570]: INFO nova.compute.manager [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Took 0.43 seconds to deallocate network for instance. [ 1882.135130] env[61570]: INFO nova.scheduler.client.report [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Deleted allocations for instance 04741959-c2c4-4b38-92e7-43f941818775 [ 1882.170457] env[61570]: DEBUG oslo_concurrency.lockutils [None req-26fa1fac-6135-4149-a1c2-f8585d9c8b85 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "04741959-c2c4-4b38-92e7-43f941818775" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.693s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.171462] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "04741959-c2c4-4b38-92e7-43f941818775" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 426.482s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.171462] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "04741959-c2c4-4b38-92e7-43f941818775-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.171821] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "04741959-c2c4-4b38-92e7-43f941818775-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.171905] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "04741959-c2c4-4b38-92e7-43f941818775-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.175857] env[61570]: INFO nova.compute.manager [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Terminating instance [ 1882.177885] env[61570]: DEBUG nova.compute.manager [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1882.178395] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1882.178395] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e34de4c4-b650-4f15-b69f-f690b8c1cd26 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.189311] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e51c74a-fcf3-4054-b21b-dc3cbab2d279 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.204435] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: f46d0506-529b-47d0-ad10-c152bb9005f0] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1882.231925] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04741959-c2c4-4b38-92e7-43f941818775 could not be found. [ 1882.232211] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1882.232441] env[61570]: INFO nova.compute.manager [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Took 0.05 seconds to destroy the instance on the hypervisor. 
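The recurring "Acquiring lock ... by ...", "acquired ... :: waited Ns", and "released ... :: held Ns" triples (for "compute_resources", the instance UUID, and the "-events" locks above) come from oslo_concurrency.lockutils. A rough re-creation of that logging pattern, as a hypothetical context manager rather than the actual lockutils implementation:

    import contextlib
    import threading
    import time

    _locks: dict[str, threading.Lock] = {}

    @contextlib.contextmanager
    def logged_lock(name: str, caller: str):
        """Hypothetical sketch of the acquire/waited/held lines in the log;
        the real implementation is oslo_concurrency.lockutils.lock()."""
        lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

The long "waited 426.482s" on the terminate lock above is the interesting signal in this pattern: do_terminate_instance had to queue behind the (re-scheduled) build that held the same per-instance lock for 621.693s.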
[ 1882.233772] env[61570]: DEBUG oslo.service.loopingcall [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1882.234158] env[61570]: DEBUG nova.compute.manager [-] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1882.234301] env[61570]: DEBUG nova.network.neutron [-] [instance: 04741959-c2c4-4b38-92e7-43f941818775] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1882.239506] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: f46d0506-529b-47d0-ad10-c152bb9005f0] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1882.271671] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "f46d0506-529b-47d0-ad10-c152bb9005f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.297s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.273509] env[61570]: DEBUG nova.network.neutron [-] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.284807] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1882.290029] env[61570]: INFO nova.compute.manager [-] [instance: 04741959-c2c4-4b38-92e7-43f941818775] Took 0.05 seconds to deallocate network for instance. 
[ 1882.350504] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.350748] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.352405] env[61570]: INFO nova.compute.claims [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1882.406498] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b181144-71c0-462e-9bd4-645b2eb0f146 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "04741959-c2c4-4b38-92e7-43f941818775" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.235s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.407396] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "04741959-c2c4-4b38-92e7-43f941818775" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 157.199s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.410018] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 04741959-c2c4-4b38-92e7-43f941818775] During sync_power_state the instance has a pending task (deleting). Skip. 
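The "Claim successful on node ..." entry and the "Inventory has not changed ... based on inventory data" entries work against the same placement inventory structure ({'VCPU': {...}, 'MEMORY_MB': {...}, 'DISK_GB': {...}}). A simplified, assumption-laden sketch of how such a record maps to usable capacity for a claim; the real logic lives in nova's resource tracker and the placement service, and the (total - reserved) * allocation_ratio rule here is an approximation of it:

    # Inventory record in the same shape as the one logged for provider 829dc000-....
    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 96},
    }

    def fits(requested: dict, inventory: dict, used: dict) -> bool:
        """Assumed capacity rule: (total - reserved) * allocation_ratio,
        with each single request also bounded by max_unit."""
        for rc, amount in requested.items():
            inv = inventory[rc]
            capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
            if amount > inv["max_unit"] or used.get(rc, 0) + amount > capacity:
                return False
        return True

    # The m1.nano flavor used by these tempest instances requests 1 VCPU, 128 MB RAM, 1 GB disk,
    # matching the per-instance allocations reported later by the resource tracker.
    print(fits({"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}, INVENTORY, used={}))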
[ 1882.410018] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "04741959-c2c4-4b38-92e7-43f941818775" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.614785] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6a179c-1ec5-4ca4-af56-fb42031fcf5b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.625819] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f17de0-0c2e-44f0-9d6b-18fd89ae8ccb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.656827] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca376bf-a642-470e-a9a0-a1af401737d5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.665131] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d90fd54-02ae-4899-9030-6ca57226079e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.680394] env[61570]: DEBUG nova.compute.provider_tree [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1882.691484] env[61570]: DEBUG nova.scheduler.client.report [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1882.712680] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.362s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.713251] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1882.756375] env[61570]: DEBUG nova.compute.utils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1882.757824] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1882.758125] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1882.769768] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1882.817131] env[61570]: DEBUG nova.policy [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97aa8ed2bcb44d5ea39cf1f778289e31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb1862517c1f4d239b931bb18211f2b8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1882.842393] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1882.892134] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1882.892134] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1882.892134] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1882.894621] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1882.894621] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1882.894621] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1882.894621] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1882.894621] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1882.894810] env[61570]: DEBUG 
nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1882.894810] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1882.894810] env[61570]: DEBUG nova.virt.hardware [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1882.894810] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d1d7f3-fea0-497a-b1ad-79589d9a1e25 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.894810] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d8ef00-894d-49c9-a448-bf64d43e19f2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.206828] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquiring lock "de0d43a3-122f-43de-9992-e30d2954408f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.207366] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Lock "de0d43a3-122f-43de-9992-e30d2954408f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.321963] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Successfully created port: 27553823-9ccb-45dc-b0ea-d58686ee0b48 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1884.032549] env[61570]: DEBUG nova.compute.manager [req-28c038f1-e62b-4e19-a0e2-c9599b9b8312 req-b6a0dc01-54eb-48a9-9d2d-6b14712c4f90 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Received event network-vif-plugged-27553823-9ccb-45dc-b0ea-d58686ee0b48 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1884.032773] env[61570]: DEBUG oslo_concurrency.lockutils [req-28c038f1-e62b-4e19-a0e2-c9599b9b8312 req-b6a0dc01-54eb-48a9-9d2d-6b14712c4f90 service nova] Acquiring lock "15cc451d-9419-4952-83a4-4fde3d237f8b-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.033092] env[61570]: DEBUG oslo_concurrency.lockutils [req-28c038f1-e62b-4e19-a0e2-c9599b9b8312 req-b6a0dc01-54eb-48a9-9d2d-6b14712c4f90 service nova] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.033175] env[61570]: DEBUG oslo_concurrency.lockutils [req-28c038f1-e62b-4e19-a0e2-c9599b9b8312 req-b6a0dc01-54eb-48a9-9d2d-6b14712c4f90 service nova] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.033339] env[61570]: DEBUG nova.compute.manager [req-28c038f1-e62b-4e19-a0e2-c9599b9b8312 req-b6a0dc01-54eb-48a9-9d2d-6b14712c4f90 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] No waiting events found dispatching network-vif-plugged-27553823-9ccb-45dc-b0ea-d58686ee0b48 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1884.033501] env[61570]: WARNING nova.compute.manager [req-28c038f1-e62b-4e19-a0e2-c9599b9b8312 req-b6a0dc01-54eb-48a9-9d2d-6b14712c4f90 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Received unexpected event network-vif-plugged-27553823-9ccb-45dc-b0ea-d58686ee0b48 for instance with vm_state building and task_state spawning. [ 1884.044572] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Successfully updated port: 27553823-9ccb-45dc-b0ea-d58686ee0b48 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1884.055965] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "refresh_cache-15cc451d-9419-4952-83a4-4fde3d237f8b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.056177] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquired lock "refresh_cache-15cc451d-9419-4952-83a4-4fde3d237f8b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.056935] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1884.106402] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1884.369687] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Updating instance_info_cache with network_info: [{"id": "27553823-9ccb-45dc-b0ea-d58686ee0b48", "address": "fa:16:3e:bc:19:4f", "network": {"id": "921cccea-8eda-4a43-b5ac-0a39d620a87e", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-453140127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb1862517c1f4d239b931bb18211f2b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27553823-9c", "ovs_interfaceid": "27553823-9ccb-45dc-b0ea-d58686ee0b48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.385968] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Releasing lock "refresh_cache-15cc451d-9419-4952-83a4-4fde3d237f8b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.386348] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Instance network_info: |[{"id": "27553823-9ccb-45dc-b0ea-d58686ee0b48", "address": "fa:16:3e:bc:19:4f", "network": {"id": "921cccea-8eda-4a43-b5ac-0a39d620a87e", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-453140127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb1862517c1f4d239b931bb18211f2b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27553823-9c", "ovs_interfaceid": "27553823-9ccb-45dc-b0ea-d58686ee0b48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1884.386798] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:19:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee555dfd-3d1a-4220-89cd-ffba64e4acf0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27553823-9ccb-45dc-b0ea-d58686ee0b48', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1884.394469] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Creating folder: Project (fb1862517c1f4d239b931bb18211f2b8). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1884.395288] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c6cf12d-b882-4e9d-9b99-a370415513ad {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.406986] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Created folder: Project (fb1862517c1f4d239b931bb18211f2b8) in parent group-v953072. [ 1884.407205] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Creating folder: Instances. Parent ref: group-v953177. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1884.407451] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0899666-1f37-41cb-ba6e-4a5177956d12 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.417023] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Created folder: Instances in parent group-v953177. [ 1884.417023] env[61570]: DEBUG oslo.service.loopingcall [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1884.417023] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1884.417331] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d60d3350-a9f6-4530-993b-34419fa6d3c6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.436576] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1884.436576] env[61570]: value = "task-4891443" [ 1884.436576] env[61570]: _type = "Task" [ 1884.436576] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.446346] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891443, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.946679] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891443, 'name': CreateVM_Task, 'duration_secs': 0.33449} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.946871] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1884.947529] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.947694] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.948026] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1884.948287] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad017d6d-6c03-4f28-875f-90f7a7408cde {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.952681] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Waiting for the task: (returnval){ [ 1884.952681] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52dbdf58-c5ea-3d19-f30f-ce7bdcbcd869" [ 1884.952681] env[61570]: _type = "Task" [ 1884.952681] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.960487] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52dbdf58-c5ea-3d19-f30f-ce7bdcbcd869, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.473932] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.473932] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1885.473932] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.090312] env[61570]: DEBUG nova.compute.manager [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Received event network-changed-27553823-9ccb-45dc-b0ea-d58686ee0b48 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1886.090589] env[61570]: DEBUG nova.compute.manager [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Refreshing instance network info cache due to event network-changed-27553823-9ccb-45dc-b0ea-d58686ee0b48. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1886.090853] env[61570]: DEBUG oslo_concurrency.lockutils [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] Acquiring lock "refresh_cache-15cc451d-9419-4952-83a4-4fde3d237f8b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.091017] env[61570]: DEBUG oslo_concurrency.lockutils [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] Acquired lock "refresh_cache-15cc451d-9419-4952-83a4-4fde3d237f8b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.091087] env[61570]: DEBUG nova.network.neutron [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Refreshing network info cache for port 27553823-9ccb-45dc-b0ea-d58686ee0b48 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1886.373334] env[61570]: DEBUG nova.network.neutron [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Updated VIF entry in instance network info cache for port 27553823-9ccb-45dc-b0ea-d58686ee0b48. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1886.373696] env[61570]: DEBUG nova.network.neutron [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Updating instance_info_cache with network_info: [{"id": "27553823-9ccb-45dc-b0ea-d58686ee0b48", "address": "fa:16:3e:bc:19:4f", "network": {"id": "921cccea-8eda-4a43-b5ac-0a39d620a87e", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-453140127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb1862517c1f4d239b931bb18211f2b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27553823-9c", "ovs_interfaceid": "27553823-9ccb-45dc-b0ea-d58686ee0b48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.384375] env[61570]: DEBUG oslo_concurrency.lockutils [req-303cbbd9-9126-4f4b-8a5a-7fda7acc50b4 req-d90b5b57-aa02-456d-b33d-118a80347740 service nova] Releasing lock "refresh_cache-15cc451d-9419-4952-83a4-4fde3d237f8b" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.754118] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1892.766401] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.766624] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.766794] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.766954] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1892.768219] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa533f6-4eb5-4b85-928c-acc1b0d4cb8d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.777525] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5687eedc-2480-4327-a970-ccca567eee74 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.792536] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9386764f-3c2a-446d-b276-7990306d62dc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.800044] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8081840-9b57-4e92-9976-a568d0f520c2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.833217] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180589MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1892.833380] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.833586] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.909019] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.909487] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.909487] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.909610] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.909648] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.909792] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.909929] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.910344] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.910344] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.910344] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1892.924191] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1892.937707] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 265ab6dd-d584-4575-9e4c-e8ad7a4442f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1892.949458] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1892.963117] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1892.963419] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1892.963551] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '79', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_97bb60f83e91408c89c70c926c236a71': '1', 'io_workload': '10', 'num_proj_977a17d2733049fa8200053e72fc086c': '1', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_task_spawning': '1', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1893.161849] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13e0a51-0ccf-45ae-9482-9fa665c69ed5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.170049] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b8bda8-32a7-43d4-90d7-9b99b0303024 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.201274] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125b813e-5365-4f2e-8ae5-3b2c0d6f647c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.210293] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4771dfb-1694-4531-917c-5d241e3307ac {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.226341] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1893.235619] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1893.252233] env[61570]: DEBUG 
nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1893.252429] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.419s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.252371] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.252716] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1894.753965] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.753223] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.753685] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1895.753685] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1895.776440] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.776635] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.776722] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.776857] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.776980] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.777114] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.777234] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.777351] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.777466] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.777579] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1895.777695] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1895.778242] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.778425] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1896.753632] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.753567] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.748661] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1929.096180] env[61570]: WARNING oslo_vmware.rw_handles [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1929.096180] env[61570]: ERROR oslo_vmware.rw_handles [ 1929.096876] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 
{{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1929.099051] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1929.099307] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Copying Virtual Disk [datastore2] vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/1e214431-3929-4740-9815-220645ab8b88/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1929.099620] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78393f22-c5a5-4329-996b-1a8ae879a7b2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.107595] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Waiting for the task: (returnval){ [ 1929.107595] env[61570]: value = "task-4891444" [ 1929.107595] env[61570]: _type = "Task" [ 1929.107595] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.116176] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Task: {'id': task-4891444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.618144] env[61570]: DEBUG oslo_vmware.exceptions [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1929.618387] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.618957] env[61570]: ERROR nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1929.618957] env[61570]: Faults: ['InvalidArgument'] [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Traceback (most recent call last): [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] yield resources [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self.driver.spawn(context, instance, image_meta, [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self._fetch_image_if_missing(context, vi) [ 1929.618957] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] image_cache(vi, tmp_image_ds_loc) [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] vm_util.copy_virtual_disk( [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] session._wait_for_task(vmdk_copy_task) [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] return self.wait_for_task(task_ref) [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] return evt.wait() [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] result = hub.switch() [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1929.619946] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] return self.greenlet.switch() [ 1929.621619] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1929.621619] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self.f(*self.args, **self.kw) [ 1929.621619] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1929.621619] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] raise exceptions.translate_fault(task_info.error) [ 1929.621619] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1929.621619] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Faults: ['InvalidArgument'] [ 1929.621619] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] [ 1929.621619] env[61570]: INFO nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Terminating instance [ 1929.621619] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.622015] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1929.622015] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-946d6dbb-cef4-4564-9aee-decbc4b749e3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.624945] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1929.624945] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1929.624945] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc919230-762d-42d0-8c74-8fff32a672a0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.632559] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1929.632817] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f9371e9-8a53-4491-859a-16bb9624b86d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.635157] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1929.635368] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1929.636415] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8eac269c-a526-40d9-bd54-2743019bd375 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.641449] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Waiting for the task: (returnval){ [ 1929.641449] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f77db8-baf9-d27f-a4d8-4e0785fc9438" [ 1929.641449] env[61570]: _type = "Task" [ 1929.641449] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.651221] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f77db8-baf9-d27f-a4d8-4e0785fc9438, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.707597] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1929.707861] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1929.708047] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Deleting the datastore file [datastore2] fe2fe70e-6a16-4b74-9766-583f8ca87dd3 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1929.708422] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-580798a9-bb3b-4f59-b382-9953cb2bb6a6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.716355] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Waiting for the task: (returnval){ [ 1929.716355] env[61570]: value = "task-4891446" [ 1929.716355] env[61570]: _type = "Task" [ 1929.716355] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.724666] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Task: {'id': task-4891446, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.151683] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1930.152022] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Creating directory with path [datastore2] vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1930.152141] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-469ad03c-d372-44df-a859-96a65778d778 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.164076] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Created directory with path [datastore2] vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1930.164318] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Fetch image to [datastore2] vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1930.164481] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1930.165278] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9e57a6-36c8-42b5-a30a-23f9b6b483f6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.172580] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5a3d72-a498-465f-91b4-dce62131bbc8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.182583] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d62ce4-fa70-4440-b29b-4282b9e78688 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1930.214317] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b6e9fc-074a-4d22-8ebc-426a5a7ff3ea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.229485] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0a216356-9ea0-4476-a64d-5c21fd7bee9b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.231316] env[61570]: DEBUG oslo_vmware.api [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Task: {'id': task-4891446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066878} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.231562] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1930.231740] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1930.231909] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1930.232094] env[61570]: INFO nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Took 0.61 seconds to destroy the instance on the hypervisor. 
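
Note on the "Waiting for the task: ... to complete" and "progress is 0%" entries above: they come from a poll loop that re-reads task state until the task finishes, then either returns its result or raises the translated fault (the VimFaultException seen in the earlier traceback). A minimal, self-contained Python sketch of that pattern, where wait_for_task, read_task_info and TaskFault are toy stand-ins and not the oslo.vmware API:

import time

class TaskFault(Exception):
    """Stand-in for the translated fault (e.g. VimFaultException) raised on task error."""

def wait_for_task(read_task_info, interval=0.5):
    # Poll the task until it leaves the running state, echoing the
    # "progress is N%" DEBUG lines, then return the result or raise.
    while True:
        info = read_task_info()
        if info["state"] == "running":
            print("Task {} progress is {}%.".format(info["id"], info.get("progress", 0)))
            time.sleep(interval)
            continue
        if info["state"] == "success":
            return info.get("result")
        raise TaskFault(info.get("error", "task failed"))

# Toy task that reports progress twice and then succeeds.
_polls = iter([
    {"id": "task-4891446", "state": "running", "progress": 0},
    {"id": "task-4891446", "state": "running", "progress": 50},
    {"id": "task-4891446", "state": "success", "result": "ok"},
])
print(wait_for_task(lambda: next(_polls), interval=0))
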
[ 1930.234301] env[61570]: DEBUG nova.compute.claims [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1930.234579] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.234579] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.257146] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1930.314167] env[61570]: DEBUG oslo_vmware.rw_handles [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1930.372673] env[61570]: DEBUG oslo_vmware.rw_handles [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1930.372869] env[61570]: DEBUG oslo_vmware.rw_handles [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1930.525994] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9aea7e-27d2-4531-893a-f2c194b4073f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.536251] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc09d81d-17e7-4adf-9a46-b62c949c3601 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.568363] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80723e6b-b580-4d4b-a3f0-6c22965113c8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.577058] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbffb740-f87c-4926-9fea-4dab0c79358d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.590671] env[61570]: DEBUG nova.compute.provider_tree [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1930.601250] env[61570]: DEBUG nova.scheduler.client.report [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1930.618076] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.383s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.618651] env[61570]: ERROR nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1930.618651] env[61570]: Faults: ['InvalidArgument'] [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Traceback (most recent call last): [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1930.618651] env[61570]: ERROR 
nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self.driver.spawn(context, instance, image_meta, [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self._fetch_image_if_missing(context, vi) [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] image_cache(vi, tmp_image_ds_loc) [ 1930.618651] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] vm_util.copy_virtual_disk( [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] session._wait_for_task(vmdk_copy_task) [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] return self.wait_for_task(task_ref) [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] return evt.wait() [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] result = hub.switch() [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] return self.greenlet.switch() [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1930.619091] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] self.f(*self.args, **self.kw) [ 1930.619782] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1930.619782] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] raise exceptions.translate_fault(task_info.error) [ 1930.619782] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1930.619782] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Faults: ['InvalidArgument'] [ 1930.619782] env[61570]: ERROR nova.compute.manager [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] [ 1930.619782] env[61570]: DEBUG nova.compute.utils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1930.620833] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Build of instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 was re-scheduled: A specified parameter was not correct: fileType [ 1930.620833] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1930.621307] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1930.621490] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1930.621660] env[61570]: DEBUG nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1930.621818] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1930.986074] env[61570]: DEBUG nova.network.neutron [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.997540] env[61570]: INFO nova.compute.manager [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Took 0.38 seconds to deallocate network for instance. [ 1931.106181] env[61570]: INFO nova.scheduler.client.report [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Deleted allocations for instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 [ 1931.129811] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1e5c8d31-acc7-4097-a3a7-d5154742b3d7 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 581.762s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.131208] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 385.781s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.131956] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Acquiring lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.131956] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.131956] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.134120] env[61570]: INFO nova.compute.manager [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Terminating instance [ 1931.137162] env[61570]: DEBUG nova.compute.manager [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1931.137403] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1931.137690] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e263eef5-58aa-469a-b53f-6c5966d616e0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.142366] env[61570]: DEBUG nova.compute.manager [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1931.152038] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b6d27c-e94e-4efc-9735-a42ca5c29c15 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.187912] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fe2fe70e-6a16-4b74-9766-583f8ca87dd3 could not be found. [ 1931.190247] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1931.190247] env[61570]: INFO nova.compute.manager [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1931.190247] env[61570]: DEBUG oslo.service.loopingcall [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.190247] env[61570]: DEBUG nova.compute.manager [-] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1931.190247] env[61570]: DEBUG nova.network.neutron [-] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1931.236774] env[61570]: DEBUG nova.network.neutron [-] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.245192] env[61570]: INFO nova.compute.manager [-] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] Took 0.06 seconds to deallocate network for instance. [ 1931.247537] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.247770] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.249176] env[61570]: INFO nova.compute.claims [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1931.354905] env[61570]: DEBUG oslo_concurrency.lockutils [None req-1372ec54-2d3b-4d9c-afc0-489cb802fca3 tempest-ServerActionsTestJSON-1744571269 tempest-ServerActionsTestJSON-1744571269-project-member] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.224s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.356253] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 206.147s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.356253] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: fe2fe70e-6a16-4b74-9766-583f8ca87dd3] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1931.356442] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "fe2fe70e-6a16-4b74-9766-583f8ca87dd3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.484326] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d5e72b-0a3d-40c0-be84-6e01ca642a25 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.492044] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d97faf-16c1-4a9e-95b6-10214903d049 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.521923] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b3a66f-0f12-426e-80f0-c2ebb3ab699d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.530051] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d88d69a-3183-40cc-a270-e95e8dc5264c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.543780] env[61570]: DEBUG nova.compute.provider_tree [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1931.552761] env[61570]: DEBUG nova.scheduler.client.report [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1931.567434] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.567932] env[61570]: DEBUG nova.compute.manager [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1931.603890] env[61570]: DEBUG nova.compute.utils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1931.605145] env[61570]: DEBUG nova.compute.manager [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1931.606058] env[61570]: DEBUG nova.network.neutron [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1931.614758] env[61570]: DEBUG nova.compute.manager [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1931.668974] env[61570]: DEBUG nova.policy [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15867fd837134df1b3da76542def2a88', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e1c9d890ee242c9a13e7ebb409c9fb4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1931.734024] env[61570]: DEBUG nova.compute.manager [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1931.765520] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1931.765766] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1931.765926] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1931.766124] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1931.766275] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1931.766428] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1931.766639] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1931.766803] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1931.766966] env[61570]: DEBUG 
nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1931.767174] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1931.767357] env[61570]: DEBUG nova.virt.hardware [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1931.768314] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb93b9ac-6910-422b-a667-7e0bc6f9d87c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.777205] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c56c2a-2c18-44fb-af19-7490f6f76e00 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.041495] env[61570]: DEBUG nova.network.neutron [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Successfully created port: 22a3ac02-7c13-47b4-a397-ab3c65b8777d {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1932.808039] env[61570]: DEBUG nova.network.neutron [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Successfully updated port: 22a3ac02-7c13-47b4-a397-ab3c65b8777d {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1932.824663] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquiring lock "refresh_cache-8133bec0-155c-4ffe-b972-adabe3b281dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.824820] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquired lock "refresh_cache-8133bec0-155c-4ffe-b972-adabe3b281dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.824973] env[61570]: DEBUG nova.network.neutron [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1932.874383] env[61570]: DEBUG nova.network.neutron [None 
req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1933.035638] env[61570]: DEBUG nova.compute.manager [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Received event network-vif-plugged-22a3ac02-7c13-47b4-a397-ab3c65b8777d {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1933.035871] env[61570]: DEBUG oslo_concurrency.lockutils [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] Acquiring lock "8133bec0-155c-4ffe-b972-adabe3b281dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.036090] env[61570]: DEBUG oslo_concurrency.lockutils [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] Lock "8133bec0-155c-4ffe-b972-adabe3b281dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.036261] env[61570]: DEBUG oslo_concurrency.lockutils [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] Lock "8133bec0-155c-4ffe-b972-adabe3b281dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.036439] env[61570]: DEBUG nova.compute.manager [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] No waiting events found dispatching network-vif-plugged-22a3ac02-7c13-47b4-a397-ab3c65b8777d {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1933.036585] env[61570]: WARNING nova.compute.manager [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Received unexpected event network-vif-plugged-22a3ac02-7c13-47b4-a397-ab3c65b8777d for instance with vm_state building and task_state spawning. [ 1933.036746] env[61570]: DEBUG nova.compute.manager [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Received event network-changed-22a3ac02-7c13-47b4-a397-ab3c65b8777d {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1933.036937] env[61570]: DEBUG nova.compute.manager [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Refreshing instance network info cache due to event network-changed-22a3ac02-7c13-47b4-a397-ab3c65b8777d. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1933.037339] env[61570]: DEBUG oslo_concurrency.lockutils [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] Acquiring lock "refresh_cache-8133bec0-155c-4ffe-b972-adabe3b281dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.130530] env[61570]: DEBUG nova.network.neutron [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Updating instance_info_cache with network_info: [{"id": "22a3ac02-7c13-47b4-a397-ab3c65b8777d", "address": "fa:16:3e:d9:7e:d6", "network": {"id": "5497900d-1591-4701-8ae1-50e82239d8bd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-186851270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e1c9d890ee242c9a13e7ebb409c9fb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22a3ac02-7c", "ovs_interfaceid": "22a3ac02-7c13-47b4-a397-ab3c65b8777d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.143612] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Releasing lock "refresh_cache-8133bec0-155c-4ffe-b972-adabe3b281dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.143920] env[61570]: DEBUG nova.compute.manager [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Instance network_info: |[{"id": "22a3ac02-7c13-47b4-a397-ab3c65b8777d", "address": "fa:16:3e:d9:7e:d6", "network": {"id": "5497900d-1591-4701-8ae1-50e82239d8bd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-186851270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e1c9d890ee242c9a13e7ebb409c9fb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap22a3ac02-7c", "ovs_interfaceid": "22a3ac02-7c13-47b4-a397-ab3c65b8777d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1933.144234] env[61570]: DEBUG oslo_concurrency.lockutils [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] Acquired lock "refresh_cache-8133bec0-155c-4ffe-b972-adabe3b281dc" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.144440] env[61570]: DEBUG nova.network.neutron [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Refreshing network info cache for port 22a3ac02-7c13-47b4-a397-ab3c65b8777d {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1933.145559] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:7e:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '274afb4c-04df-4213-8ad2-8f48a10d78a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22a3ac02-7c13-47b4-a397-ab3c65b8777d', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1933.153974] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Creating folder: Project (7e1c9d890ee242c9a13e7ebb409c9fb4). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1933.157655] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a34a0fec-3dfc-459b-be76-72c7553425d2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.169481] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Created folder: Project (7e1c9d890ee242c9a13e7ebb409c9fb4) in parent group-v953072. [ 1933.169786] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Creating folder: Instances. Parent ref: group-v953180. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1933.170511] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad5d7fab-a823-4e2a-b5d0-636283d71c13 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.180280] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Created folder: Instances in parent group-v953180. 
[ 1933.180540] env[61570]: DEBUG oslo.service.loopingcall [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1933.180735] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1933.180943] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7880d60a-82f7-40eb-a3d7-f0079ff7202b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.202189] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1933.202189] env[61570]: value = "task-4891449" [ 1933.202189] env[61570]: _type = "Task" [ 1933.202189] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.210256] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891449, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.523627] env[61570]: DEBUG nova.network.neutron [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Updated VIF entry in instance network info cache for port 22a3ac02-7c13-47b4-a397-ab3c65b8777d. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1933.523997] env[61570]: DEBUG nova.network.neutron [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Updating instance_info_cache with network_info: [{"id": "22a3ac02-7c13-47b4-a397-ab3c65b8777d", "address": "fa:16:3e:d9:7e:d6", "network": {"id": "5497900d-1591-4701-8ae1-50e82239d8bd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-186851270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e1c9d890ee242c9a13e7ebb409c9fb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22a3ac02-7c", "ovs_interfaceid": "22a3ac02-7c13-47b4-a397-ab3c65b8777d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.535548] env[61570]: DEBUG oslo_concurrency.lockutils [req-31cd70eb-ad17-4aa1-909c-e3b0bf094451 req-7592f532-6ea8-4170-ab11-8b7f7f0a951b service nova] Releasing lock "refresh_cache-8133bec0-155c-4ffe-b972-adabe3b281dc" {{(pid=61570) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.713048] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891449, 'name': CreateVM_Task, 'duration_secs': 0.330983} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.713048] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1933.713760] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.713961] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.714342] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1933.714601] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f87907-1833-41d2-aa48-15c812245579 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.719447] env[61570]: DEBUG oslo_vmware.api [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Waiting for the task: (returnval){ [ 1933.719447] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52592c81-3a3d-daa0-680c-3ac94be14c8d" [ 1933.719447] env[61570]: _type = "Task" [ 1933.719447] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.727563] env[61570]: DEBUG oslo_vmware.api [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52592c81-3a3d-daa0-680c-3ac94be14c8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.231326] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.231686] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1934.231884] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.286012] env[61570]: DEBUG oslo_concurrency.lockutils [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "15cc451d-9419-4952-83a4-4fde3d237f8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.647328] env[61570]: DEBUG oslo_concurrency.lockutils [None req-4647dcfc-df5e-4fbb-b27f-f6404e6d6c05 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquiring lock "8133bec0-155c-4ffe-b972-adabe3b281dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.752868] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.765514] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.765751] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.765973] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.766179] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1953.767371] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cede35d2-7d54-4cd4-ac7d-ca39f45f48a5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.776859] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03460f6-4a7a-4504-bb20-2c142538ace0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.792386] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5dfd46-75de-454b-9c00-f2b6923d31d6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.801295] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b4607c-7047-4479-bd65-32e438d621fb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.834298] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180554MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1953.834392] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.834605] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.924805] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.925560] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.925560] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.925560] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.925876] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.925876] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.925876] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.926026] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.926082] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.926185] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 1953.944192] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 265ab6dd-d584-4575-9e4c-e8ad7a4442f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1953.956911] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1953.970843] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 1953.971105] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1953.971285] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '80', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_977a17d2733049fa8200053e72fc086c': '1', 'io_workload': '10', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1954.157468] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659f826f-e63c-4433-8375-de01eb68c0f6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.166109] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c48b05-04d8-4132-80fa-3282dcffdbba {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.196395] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f568c0a-86c7-4dad-966c-224d2c19076f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.204156] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed8fffc-3a95-47a6-91ce-5ef260fbe667 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.218336] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1954.228600] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1954.243962] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1954.244148] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.410s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.244875] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.245202] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 1955.753645] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.753821] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 1955.753941] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 1955.776238] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.776387] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.776510] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.776633] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.776753] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.776868] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.776986] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.777118] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.777234] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.777346] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 1955.777464] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 1955.777939] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.778126] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.778290] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.753280] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.749210] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.772462] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1964.772489] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.005405] env[61570]: WARNING oslo_vmware.rw_handles [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1979.005405] env[61570]: 
ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1979.005405] env[61570]: ERROR oslo_vmware.rw_handles [ 1979.006044] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1979.008226] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1979.008483] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Copying Virtual Disk [datastore2] vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/b2b4a9aa-2874-4bac-a915-ba7e17ab7b4f/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1979.008777] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16e167d8-d934-42dc-a77f-fa1436aebc91 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.019025] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Waiting for the task: (returnval){ [ 1979.019025] env[61570]: value = "task-4891450" [ 1979.019025] env[61570]: _type = "Task" [ 1979.019025] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.026207] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Task: {'id': task-4891450, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.529059] env[61570]: DEBUG oslo_vmware.exceptions [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1979.529325] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.529941] env[61570]: ERROR nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1979.529941] env[61570]: Faults: ['InvalidArgument'] [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Traceback (most recent call last): [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] yield resources [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] self.driver.spawn(context, instance, image_meta, [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] self._fetch_image_if_missing(context, vi) [ 1979.529941] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 
1ed132c9-9efe-4a40-b4da-308a7b23bb42] image_cache(vi, tmp_image_ds_loc) [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] vm_util.copy_virtual_disk( [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] session._wait_for_task(vmdk_copy_task) [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] return self.wait_for_task(task_ref) [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] return evt.wait() [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] result = hub.switch() [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1979.530274] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] return self.greenlet.switch() [ 1979.530583] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1979.530583] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] self.f(*self.args, **self.kw) [ 1979.530583] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1979.530583] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] raise exceptions.translate_fault(task_info.error) [ 1979.530583] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1979.530583] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Faults: ['InvalidArgument'] [ 1979.530583] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] [ 1979.530583] env[61570]: INFO nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Terminating instance [ 1979.532119] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.532340] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1979.532584] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5aebb210-35a8-442e-a006-10fef252a4e1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.536166] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1979.536465] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1979.537180] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92895638-5637-40ed-baf7-dc701da7c7d4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.541304] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1979.541548] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1979.544190] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a0cdc9a-06cd-4b7c-92a9-55e173e4da6a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.546622] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1979.547096] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba76f4be-7c61-40dc-b2b3-3926a176f794 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.554026] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Waiting for the task: (returnval){ [ 1979.554026] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ad9568-76ce-7c02-888c-e372b511ffc8" [ 1979.554026] env[61570]: _type = "Task" [ 1979.554026] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.561051] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52ad9568-76ce-7c02-888c-e372b511ffc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.620066] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1979.620302] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1979.620487] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Deleting the datastore file [datastore2] 1ed132c9-9efe-4a40-b4da-308a7b23bb42 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1979.620770] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a08d7137-e988-4bb5-b951-f6ecfa3412a0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.627586] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Waiting for the task: (returnval){ [ 1979.627586] env[61570]: value = "task-4891452" [ 1979.627586] env[61570]: _type = "Task" [ 1979.627586] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.637372] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Task: {'id': task-4891452, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.065443] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1980.065732] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Creating directory with path [datastore2] vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1980.066072] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc55e049-b23b-49cd-a4a2-db7588566185 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.080244] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Created directory with path [datastore2] vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1980.080527] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Fetch image to [datastore2] vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1980.080748] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1980.081596] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb13bab-a5c8-4335-b2bc-e8a277d31a85 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.089902] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e034412-247f-4a10-81b9-b7525a2d48e9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.101327] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6bb6cd-a7f9-4547-b983-d93d07f4f20b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.139346] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7557dc-a739-4ba7-990a-e65aeeb5ab94 {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.150397] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6f93220b-e3a1-4d1c-908b-dfddfbffd319 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.153686] env[61570]: DEBUG oslo_vmware.api [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Task: {'id': task-4891452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086536} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.154108] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1980.154405] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1980.154749] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1980.155020] env[61570]: INFO nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Took 0.62 seconds to destroy the instance on the hypervisor. 
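The records above follow the oslo.vmware task pattern visible throughout this log: a vCenter *_Task method is invoked (here DeleteDatastoreFile_Task, earlier CopyVirtualDisk_Task), the task is then polled ("progress is 0%", "completed successfully"), and a task-level error is translated into an exception such as the VimFaultException raised earlier. Below is a minimal sketch of that polling loop; get_task_info() and TaskFault are hypothetical stand-ins, not oslo.vmware's real API.

# Illustrative sketch only -- not the oslo.vmware implementation.
import time


class TaskFault(Exception):
    """Stand-in for a translated task fault (cf. VimFaultException above)."""


def poll_task(get_task_info, task_ref, interval=0.5, timeout=300):
    """Poll a task reference until it succeeds, fails, or times out.

    get_task_info is a hypothetical callable returning an object with
    .state ('running', 'success' or 'error'), .progress and .error.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            # Mirrors "raise exceptions.translate_fault(task_info.error)"
            # in the traceback logged above.
            raise TaskFault(info.error)
        # Mirrors the "progress is N%" DEBUG lines emitted while waiting.
        print(f"Task {task_ref}: {info.progress}%")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_ref} did not complete in {timeout}s")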
[ 1980.157934] env[61570]: DEBUG nova.compute.claims [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1980.158194] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.158495] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.192942] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1980.263048] env[61570]: DEBUG oslo_vmware.rw_handles [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1980.325367] env[61570]: DEBUG oslo_vmware.rw_handles [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1980.325598] env[61570]: DEBUG oslo_vmware.rw_handles [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1980.460965] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e8ee31-3eba-4a7b-8318-233c6590c643 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.469457] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dcc7e2-aef0-4688-b4a0-ea0257174855 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.502738] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58b13f8-d05e-4ab6-849c-a730ebd40763 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.510852] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58022cca-b74a-4b6f-9e7d-72145c55e330 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.278039] env[61570]: DEBUG nova.compute.provider_tree [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.286889] env[61570]: DEBUG nova.scheduler.client.report [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1981.302174] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.144s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.302740] env[61570]: ERROR nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1981.302740] env[61570]: Faults: ['InvalidArgument'] [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Traceback (most recent call last): [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 
1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] self.driver.spawn(context, instance, image_meta, [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] self._fetch_image_if_missing(context, vi) [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] image_cache(vi, tmp_image_ds_loc) [ 1981.302740] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] vm_util.copy_virtual_disk( [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] session._wait_for_task(vmdk_copy_task) [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] return self.wait_for_task(task_ref) [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] return evt.wait() [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] result = hub.switch() [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] return self.greenlet.switch() [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1981.303053] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] 
self.f(*self.args, **self.kw) [ 1981.303339] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1981.303339] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] raise exceptions.translate_fault(task_info.error) [ 1981.303339] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1981.303339] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Faults: ['InvalidArgument'] [ 1981.303339] env[61570]: ERROR nova.compute.manager [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] [ 1981.303500] env[61570]: DEBUG nova.compute.utils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1981.305346] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Build of instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 was re-scheduled: A specified parameter was not correct: fileType [ 1981.305346] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1981.305711] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1981.305880] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1981.306073] env[61570]: DEBUG nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1981.306235] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1981.818548] env[61570]: DEBUG nova.network.neutron [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.833480] env[61570]: INFO nova.compute.manager [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Took 0.53 seconds to deallocate network for instance. [ 1981.945376] env[61570]: INFO nova.scheduler.client.report [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Deleted allocations for instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 [ 1981.969793] env[61570]: DEBUG oslo_concurrency.lockutils [None req-454a637f-b355-4a3f-be88-5f7bb98652ae tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.272s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.971145] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.861s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.971381] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Acquiring lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.971631] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.971851] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.974076] env[61570]: INFO nova.compute.manager [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Terminating instance [ 1981.976463] env[61570]: DEBUG nova.compute.manager [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1981.976665] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1981.976914] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fda11bdd-cbc1-447e-bbd6-e5ff2072878e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.986439] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508a1e09-9d97-4af0-87f1-34c2becabad7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.997382] env[61570]: DEBUG nova.compute.manager [None req-e9a4e813-fcb6-481b-bca0-93552f2439c8 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 265ab6dd-d584-4575-9e4c-e8ad7a4442f4] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1982.020937] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ed132c9-9efe-4a40-b4da-308a7b23bb42 could not be found. 
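The WARNING just above shows the destroy path tolerating a VM that no longer exists on the backend: the InstanceNotFound error is logged and teardown continues, which is why the following records still report "Instance destroyed" and network deallocation. A minimal sketch of that best-effort pattern, using a hypothetical driver.destroy() call and a local InstanceNotFound stand-in rather than Nova's real classes:

# Illustrative sketch only -- not Nova's actual destroy path.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for a 'not found on backend' error."""


def destroy_instance(driver, instance_uuid):
    """Best-effort destroy that tolerates an already-missing backend VM."""
    try:
        driver.destroy(instance_uuid)  # hypothetical driver call
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    # Network deallocation and claim cleanup proceed regardless, matching
    # the "Instance destroyed" / "Deallocating network" records that follow.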
[ 1982.021176] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1982.021354] env[61570]: INFO nova.compute.manager [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1982.021608] env[61570]: DEBUG oslo.service.loopingcall [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1982.021895] env[61570]: DEBUG nova.compute.manager [-] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1982.021989] env[61570]: DEBUG nova.network.neutron [-] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1982.024771] env[61570]: DEBUG nova.compute.manager [None req-e9a4e813-fcb6-481b-bca0-93552f2439c8 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 265ab6dd-d584-4575-9e4c-e8ad7a4442f4] Instance disappeared before build. {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1982.050461] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e9a4e813-fcb6-481b-bca0-93552f2439c8 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "265ab6dd-d584-4575-9e4c-e8ad7a4442f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.166s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.051336] env[61570]: DEBUG nova.network.neutron [-] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.060963] env[61570]: DEBUG nova.compute.manager [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1982.063480] env[61570]: INFO nova.compute.manager [-] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] Took 0.04 seconds to deallocate network for instance. 
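The "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" records that follow (and recur throughout this log) are emitted by oslo.concurrency's lockutils around a named semaphore. A sketch of the calling pattern, assuming oslo.concurrency is installed; the lock name and function body here are illustrative only:

# Sketch of the locking pattern behind the lockutils DEBUG records.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Everything inside runs with the named lock held; lockutils logs the
    # acquire/release around this call, including how long the caller
    # waited for and then held the lock.
    pass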
[ 1982.111621] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.111900] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.113350] env[61570]: INFO nova.compute.claims [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1982.177429] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cd88ec4e-7254-4337-8e43-b1e2aed4ad11 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269 tempest-FloatingIPsAssociationNegativeTestJSON-1387293269-project-member] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.206s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.178377] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 256.969s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.178634] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1ed132c9-9efe-4a40-b4da-308a7b23bb42] During sync_power_state the instance has a pending task (deleting). Skip. 
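For the inventory dictionary repeated in the "Inventory has not changed" records, the capacity the scheduler works against is (total - reserved) * allocation_ratio per resource class. A quick check of the numbers reported in this log:

# Effective capacity implied by the logged inventory data.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, effective)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0 -- the headroom against
# which claims like the one below ("Claim successful on node ...") are made.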
[ 1982.178866] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "1ed132c9-9efe-4a40-b4da-308a7b23bb42" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.276445] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "2fde6764-2bfe-4634-b371-91bc1a5e38e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.276808] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "2fde6764-2bfe-4634-b371-91bc1a5e38e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.323272] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e819fc9-48eb-4bf2-b90d-6f8a25839b2f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.332108] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f220fa87-8842-4947-8004-80b63aa57cea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.362256] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6ec6a8-219e-44d3-832d-00a4d5ef3bdb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.370311] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0875c2f9-f311-4613-959a-d799a41cf3cf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.385633] env[61570]: DEBUG nova.compute.provider_tree [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1982.396044] env[61570]: DEBUG nova.scheduler.client.report [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1982.412116] 
env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.412582] env[61570]: DEBUG nova.compute.manager [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1982.446454] env[61570]: DEBUG nova.compute.utils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1982.447744] env[61570]: DEBUG nova.compute.manager [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1982.447909] env[61570]: DEBUG nova.network.neutron [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1982.456884] env[61570]: DEBUG nova.compute.manager [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1982.519603] env[61570]: DEBUG nova.policy [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f39d4ad55c0e49399dd3facf87c0a719', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35794e305c9f4380b941db6b873ec99c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 1982.522894] env[61570]: DEBUG nova.compute.manager [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1982.548796] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1982.549066] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1982.549229] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1982.549411] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1982.549553] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1982.549696] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1982.549899] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1982.550074] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1982.550241] env[61570]: DEBUG nova.virt.hardware [None 
req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1982.550403] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1982.550571] env[61570]: DEBUG nova.virt.hardware [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1982.551445] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44637598-0127-468f-9180-25f003d4abbc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.559920] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdeb788f-5dda-42e0-b0bb-52e416df80e2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.875799] env[61570]: DEBUG nova.network.neutron [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Successfully created port: 1cfc7007-6607-4dda-9662-16213f29f6f9 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1983.569283] env[61570]: DEBUG nova.network.neutron [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Successfully updated port: 1cfc7007-6607-4dda-9662-16213f29f6f9 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1983.589875] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "refresh_cache-7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.590285] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquired lock "refresh_cache-7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.590444] env[61570]: DEBUG nova.network.neutron [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1983.655770] env[61570]: DEBUG nova.network.neutron [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 
tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1983.865809] env[61570]: DEBUG nova.network.neutron [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Updating instance_info_cache with network_info: [{"id": "1cfc7007-6607-4dda-9662-16213f29f6f9", "address": "fa:16:3e:53:b2:a5", "network": {"id": "1fdd4d11-04f4-4973-9267-2e4fe6f236fe", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1041855653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35794e305c9f4380b941db6b873ec99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ef6889-a40c-40f5-a6e5-d8726606296a", "external-id": "nsx-vlan-transportzone-537", "segmentation_id": 537, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cfc7007-66", "ovs_interfaceid": "1cfc7007-6607-4dda-9662-16213f29f6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.879940] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Releasing lock "refresh_cache-7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.880295] env[61570]: DEBUG nova.compute.manager [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Instance network_info: |[{"id": "1cfc7007-6607-4dda-9662-16213f29f6f9", "address": "fa:16:3e:53:b2:a5", "network": {"id": "1fdd4d11-04f4-4973-9267-2e4fe6f236fe", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1041855653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35794e305c9f4380b941db6b873ec99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ef6889-a40c-40f5-a6e5-d8726606296a", "external-id": "nsx-vlan-transportzone-537", "segmentation_id": 537, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cfc7007-66", "ovs_interfaceid": "1cfc7007-6607-4dda-9662-16213f29f6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1983.880722] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:b2:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53ef6889-a40c-40f5-a6e5-d8726606296a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cfc7007-6607-4dda-9662-16213f29f6f9', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1983.889163] env[61570]: DEBUG oslo.service.loopingcall [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1983.889687] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1983.889929] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36424cb9-35df-4af3-b7e5-88416dc88b2b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.911435] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1983.911435] env[61570]: value = "task-4891453" [ 1983.911435] env[61570]: _type = "Task" [ 1983.911435] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.921044] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891453, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.925966] env[61570]: DEBUG nova.compute.manager [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Received event network-vif-plugged-1cfc7007-6607-4dda-9662-16213f29f6f9 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1983.926274] env[61570]: DEBUG oslo_concurrency.lockutils [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] Acquiring lock "7cbe40c4-0c9d-4ce7-bcb0-0481a170398c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.926489] env[61570]: DEBUG oslo_concurrency.lockutils [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] Lock "7cbe40c4-0c9d-4ce7-bcb0-0481a170398c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.926656] env[61570]: DEBUG oslo_concurrency.lockutils [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] Lock "7cbe40c4-0c9d-4ce7-bcb0-0481a170398c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.926820] env[61570]: DEBUG nova.compute.manager [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] No waiting events found dispatching network-vif-plugged-1cfc7007-6607-4dda-9662-16213f29f6f9 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1983.927043] env[61570]: WARNING nova.compute.manager [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Received unexpected event network-vif-plugged-1cfc7007-6607-4dda-9662-16213f29f6f9 for instance with vm_state building and task_state spawning. [ 1983.927303] env[61570]: DEBUG nova.compute.manager [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Received event network-changed-1cfc7007-6607-4dda-9662-16213f29f6f9 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 1983.927565] env[61570]: DEBUG nova.compute.manager [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Refreshing instance network info cache due to event network-changed-1cfc7007-6607-4dda-9662-16213f29f6f9. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 1983.927872] env[61570]: DEBUG oslo_concurrency.lockutils [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] Acquiring lock "refresh_cache-7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.928135] env[61570]: DEBUG oslo_concurrency.lockutils [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] Acquired lock "refresh_cache-7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.928409] env[61570]: DEBUG nova.network.neutron [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Refreshing network info cache for port 1cfc7007-6607-4dda-9662-16213f29f6f9 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1984.258889] env[61570]: DEBUG nova.network.neutron [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Updated VIF entry in instance network info cache for port 1cfc7007-6607-4dda-9662-16213f29f6f9. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1984.259827] env[61570]: DEBUG nova.network.neutron [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Updating instance_info_cache with network_info: [{"id": "1cfc7007-6607-4dda-9662-16213f29f6f9", "address": "fa:16:3e:53:b2:a5", "network": {"id": "1fdd4d11-04f4-4973-9267-2e4fe6f236fe", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1041855653-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35794e305c9f4380b941db6b873ec99c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53ef6889-a40c-40f5-a6e5-d8726606296a", "external-id": "nsx-vlan-transportzone-537", "segmentation_id": 537, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cfc7007-66", "ovs_interfaceid": "1cfc7007-6607-4dda-9662-16213f29f6f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.269880] env[61570]: DEBUG oslo_concurrency.lockutils [req-72329fea-bb52-4731-945e-fba7cbe8a4ad req-737b968a-0813-4e38-8525-3b6844702bd8 service nova] Releasing lock "refresh_cache-7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.423037] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891453, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.922531] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891453, 'name': CreateVM_Task} progress is 99%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.422351] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891453, 'name': CreateVM_Task, 'duration_secs': 1.368303} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.422510] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1985.423201] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.423368] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.423705] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1985.423962] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0ab6257-0eb6-435d-bac3-fccb639e0d2b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.429186] env[61570]: DEBUG oslo_vmware.api [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for the task: (returnval){ [ 1985.429186] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52714ed2-981f-eac4-8228-fee696d9a81e" [ 1985.429186] env[61570]: _type = "Task" [ 1985.429186] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.437883] env[61570]: DEBUG oslo_vmware.api [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52714ed2-981f-eac4-8228-fee696d9a81e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.940527] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.940828] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1985.941073] env[61570]: DEBUG oslo_concurrency.lockutils [None req-d7a7783c-7c7e-4a76-8952-4bcae7e3e0f2 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1998.450569] env[61570]: DEBUG oslo_concurrency.lockutils [None req-4925cfe7-9d06-4466-ba83-99937a4cd1ee tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.753424] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2014.753761] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2014.753868] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2014.766731] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.766959] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.767153] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.767309] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2014.768814] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc1f4f5-c17e-4432-82ff-8d2c202c7148 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.777563] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f7e64d-0db4-4e63-91b5-ac223ccf7812 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.791452] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0acd9b-e711-47cb-a381-8bf3aa403764 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.797920] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdc50e2-1d1c-481f-8a23-98fc778ab4f7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.828151] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180581MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2014.828342] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2014.828498] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.985060] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.985266] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.985403] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.985525] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.985642] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.985758] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.985871] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.985984] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.986115] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2014.986244] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2015.002301] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2015.013314] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2fde6764-2bfe-4634-b371-91bc1a5e38e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2015.013540] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2015.013700] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '82', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '10', 'num_os_type_None': '10', 'num_proj_1325c2eb2c3a40e18a473bd0c4cb7bad': '1', 'io_workload': '10', 'num_proj_35794e305c9f4380b941db6b873ec99c': '2', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2015.162506] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811a95f7-1817-4320-a1de-906fe50ce099 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.170382] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392e5e65-b9e2-443f-a0db-865bdbc60b2d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.200018] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728f1e68-6811-45e8-b35a-13b7b967c2ec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.208320] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2daf223b-170d-446d-9c5b-22c811acc8ef {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.223483] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2015.231344] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2015.248848] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2015.249058] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.421s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.752791] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.753171] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.753281] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.753547] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2016.764807] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] There are 0 instances to clean {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2017.765379] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.765706] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2017.765706] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2017.786992] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.787173] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.787282] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.787407] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.787528] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.787646] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.787764] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.787941] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.788078] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.788118] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2017.788238] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2017.788731] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.753386] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.752955] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.748768] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2029.414145] env[61570]: WARNING oslo_vmware.rw_handles [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2029.414145] env[61570]: ERROR oslo_vmware.rw_handles [ 2029.414798] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2029.417268] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Caching image 
{{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2029.417529] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Copying Virtual Disk [datastore2] vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/f69b878d-9788-4d32-a2e0-5b6dc5f77380/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2029.417848] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60fdd733-08f9-45aa-bbb9-e61f1594f844 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.428382] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Waiting for the task: (returnval){ [ 2029.428382] env[61570]: value = "task-4891454" [ 2029.428382] env[61570]: _type = "Task" [ 2029.428382] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.437763] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Task: {'id': task-4891454, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.753631] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2029.938490] env[61570]: DEBUG oslo_vmware.exceptions [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2029.938749] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.939303] env[61570]: ERROR nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2029.939303] env[61570]: Faults: ['InvalidArgument'] [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Traceback (most recent call last): [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] yield resources [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] self.driver.spawn(context, instance, image_meta, [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] self._fetch_image_if_missing(context, vi) [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2029.939303] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] image_cache(vi, tmp_image_ds_loc) [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] vm_util.copy_virtual_disk( [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] session._wait_for_task(vmdk_copy_task) [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] return self.wait_for_task(task_ref) [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] return evt.wait() [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] result = hub.switch() [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] return self.greenlet.switch() [ 2029.939662] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2029.940200] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] self.f(*self.args, **self.kw) [ 2029.940200] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2029.940200] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] raise exceptions.translate_fault(task_info.error) [ 2029.940200] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2029.940200] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Faults: ['InvalidArgument'] [ 2029.940200] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] [ 2029.940200] env[61570]: INFO nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Terminating instance [ 2029.941239] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.941518] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2029.943236] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Start destroying the 
instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2029.943430] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2029.943663] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e61b5e79-9b7a-4f10-b4c5-0f0883177581 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.946242] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a2c9f8-d99e-437f-a47c-9c39819ecbc8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.953971] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2029.953971] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f692fffd-7c92-41d9-9000-bbda2c01f6f2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.955845] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2029.956032] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2029.957046] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47afe9b6-7ee6-40f5-814d-8131fd900593 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.961902] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for the task: (returnval){ [ 2029.961902] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52dbe66a-baff-8864-9a05-552cbc876163" [ 2029.961902] env[61570]: _type = "Task" [ 2029.961902] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.969207] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52dbe66a-baff-8864-9a05-552cbc876163, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.025767] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2030.026065] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2030.026283] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Deleting the datastore file [datastore2] 431ffe34-71c4-4b44-a83c-59895fef3fc7 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2030.026624] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe9ff93e-c814-44ce-a88f-5b5fefe69cc7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.033642] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Waiting for the task: (returnval){ [ 2030.033642] env[61570]: value = "task-4891456" [ 2030.033642] env[61570]: _type = "Task" [ 2030.033642] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.041759] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Task: {'id': task-4891456, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.472418] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2030.472802] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Creating directory with path [datastore2] vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2030.472862] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a96eb94-d16d-40ac-8e99-f259b0d9cf12 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.484504] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Created directory with path [datastore2] vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2030.484691] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Fetch image to [datastore2] vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2030.484861] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2030.485637] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221de424-c91d-4b8a-9ff4-5e3e3d4ae229 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.493316] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797ad975-4053-4c46-95a2-7b1491b8a8cf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.502193] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ea541f-a405-4764-8f5b-247865db31eb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.532330] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d363812-f224-4ba9-85a8-cc4bb89bc307 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.543777] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6f7b21ef-38e2-4b3f-baf8-fbc3ff79ddb5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.545535] env[61570]: DEBUG oslo_vmware.api [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Task: {'id': task-4891456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079821} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.545773] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2030.545949] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2030.546131] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2030.546305] env[61570]: INFO nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2030.548440] env[61570]: DEBUG nova.compute.claims [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2030.548613] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.548823] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.572570] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2030.624014] env[61570]: DEBUG oslo_vmware.rw_handles [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2030.683505] env[61570]: DEBUG oslo_vmware.rw_handles [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2030.683705] env[61570]: DEBUG oslo_vmware.rw_handles [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2030.799780] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d6a264-33ab-4c3d-a163-19632071abf7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.807844] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df3b17a-d00c-4fa4-b2d2-d1b6f0f69f8e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.838358] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745b7ea4-8584-48ff-9745-243c6d753839 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.846226] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258830a6-3141-47ad-8507-b480051128bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.859821] env[61570]: DEBUG nova.compute.provider_tree [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2030.868537] env[61570]: DEBUG nova.scheduler.client.report [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2030.881373] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.332s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.881900] env[61570]: ERROR nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2030.881900] env[61570]: Faults: ['InvalidArgument'] [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Traceback (most recent call last): [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] 
self.driver.spawn(context, instance, image_meta, [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] self._fetch_image_if_missing(context, vi) [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] image_cache(vi, tmp_image_ds_loc) [ 2030.881900] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] vm_util.copy_virtual_disk( [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] session._wait_for_task(vmdk_copy_task) [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] return self.wait_for_task(task_ref) [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] return evt.wait() [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] result = hub.switch() [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] return self.greenlet.switch() [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2030.882219] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] self.f(*self.args, **self.kw) [ 2030.882527] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2030.882527] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] raise exceptions.translate_fault(task_info.error) [ 2030.882527] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2030.882527] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Faults: ['InvalidArgument'] [ 2030.882527] env[61570]: ERROR nova.compute.manager [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] [ 2030.882675] env[61570]: DEBUG nova.compute.utils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2030.884187] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Build of instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 was re-scheduled: A specified parameter was not correct: fileType [ 2030.884187] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2030.884578] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2030.884754] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2030.884931] env[61570]: DEBUG nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2030.885113] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2031.298483] env[61570]: DEBUG nova.network.neutron [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.310207] env[61570]: INFO nova.compute.manager [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Took 0.42 seconds to deallocate network for instance. 
[ 2031.406962] env[61570]: INFO nova.scheduler.client.report [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Deleted allocations for instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 [ 2031.433617] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c97d3fb1-f012-4e8b-b47d-91f5d6efcc49 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.297s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.434865] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.364s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.435117] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Acquiring lock "431ffe34-71c4-4b44-a83c-59895fef3fc7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.435365] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.435545] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.437875] env[61570]: INFO nova.compute.manager [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Terminating instance [ 2031.439479] env[61570]: DEBUG nova.compute.manager [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2031.439873] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2031.440807] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d4a3699-6bbf-45c8-acd4-5d846003187b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.451559] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b92122-715e-4ba1-bb4e-24c9c2ad7d08 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.465554] env[61570]: DEBUG nova.compute.manager [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2031.487736] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 431ffe34-71c4-4b44-a83c-59895fef3fc7 could not be found. [ 2031.488019] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2031.488181] env[61570]: INFO nova.compute.manager [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2031.488424] env[61570]: DEBUG oslo.service.loopingcall [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.488654] env[61570]: DEBUG nova.compute.manager [-] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2031.488750] env[61570]: DEBUG nova.network.neutron [-] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2031.513921] env[61570]: DEBUG nova.network.neutron [-] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.521593] env[61570]: INFO nova.compute.manager [-] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] Took 0.03 seconds to deallocate network for instance. [ 2031.526838] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.527093] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.528455] env[61570]: INFO nova.compute.claims [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2031.617929] env[61570]: DEBUG oslo_concurrency.lockutils [None req-e5009c11-7d42-4f5b-ab50-fdeec7a06174 tempest-ImagesTestJSON-323194031 tempest-ImagesTestJSON-323194031-project-member] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.618958] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 306.410s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.619179] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 431ffe34-71c4-4b44-a83c-59895fef3fc7] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2031.619407] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "431ffe34-71c4-4b44-a83c-59895fef3fc7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.728170] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a750dc94-57e4-4b35-9689-a25a0cd08e08 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.737356] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f507f761-92b4-4770-b35a-f56422e5bbbd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.770697] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0c7607-a2f4-41d8-a19d-0948322d4147 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.780029] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534d9258-0de0-46c8-9984-876cc3fafcfd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.793969] env[61570]: DEBUG nova.compute.provider_tree [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2031.804703] env[61570]: DEBUG nova.scheduler.client.report [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2031.822366] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.295s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.822888] env[61570]: DEBUG nova.compute.manager [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2031.863023] env[61570]: DEBUG nova.compute.utils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2031.864236] env[61570]: DEBUG nova.compute.manager [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2031.864431] env[61570]: DEBUG nova.network.neutron [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2031.874295] env[61570]: DEBUG nova.compute.manager [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2031.946264] env[61570]: DEBUG nova.compute.manager [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2031.968618] env[61570]: DEBUG nova.policy [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39a39e7e7cf5490e9d7c69c611551e87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e11c8be84d5d443f9d287ef7c345fe9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 2031.979415] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2031.979724] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2031.979889] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2031.980083] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2031.980232] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2031.980377] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2031.980583] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2031.980739] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2031.981077] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2031.981161] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2031.981332] env[61570]: DEBUG nova.virt.hardware [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2031.982568] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bcc585-97ac-4ce0-95ec-014e09c6455d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.991142] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae58b679-3814-4f1a-a359-0a7ecff0ea53 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.354475] env[61570]: DEBUG nova.network.neutron [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Successfully created port: 059b121c-c625-44ea-bb5b-8c0b270a9cfc {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2033.072293] env[61570]: DEBUG nova.network.neutron [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Successfully updated port: 059b121c-c625-44ea-bb5b-8c0b270a9cfc {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2033.094615] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquiring lock 
"refresh_cache-de0d43a3-122f-43de-9992-e30d2954408f" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.094758] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquired lock "refresh_cache-de0d43a3-122f-43de-9992-e30d2954408f" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.094926] env[61570]: DEBUG nova.network.neutron [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2033.166887] env[61570]: DEBUG nova.network.neutron [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2033.342140] env[61570]: DEBUG nova.compute.manager [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Received event network-vif-plugged-059b121c-c625-44ea-bb5b-8c0b270a9cfc {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2033.343808] env[61570]: DEBUG oslo_concurrency.lockutils [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] Acquiring lock "de0d43a3-122f-43de-9992-e30d2954408f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.344232] env[61570]: DEBUG oslo_concurrency.lockutils [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] Lock "de0d43a3-122f-43de-9992-e30d2954408f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.344415] env[61570]: DEBUG oslo_concurrency.lockutils [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] Lock "de0d43a3-122f-43de-9992-e30d2954408f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.345139] env[61570]: DEBUG nova.compute.manager [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] No waiting events found dispatching network-vif-plugged-059b121c-c625-44ea-bb5b-8c0b270a9cfc {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2033.345368] env[61570]: WARNING nova.compute.manager [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Received unexpected event 
network-vif-plugged-059b121c-c625-44ea-bb5b-8c0b270a9cfc for instance with vm_state building and task_state spawning. [ 2033.345575] env[61570]: DEBUG nova.compute.manager [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Received event network-changed-059b121c-c625-44ea-bb5b-8c0b270a9cfc {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2033.345712] env[61570]: DEBUG nova.compute.manager [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Refreshing instance network info cache due to event network-changed-059b121c-c625-44ea-bb5b-8c0b270a9cfc. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2033.345902] env[61570]: DEBUG oslo_concurrency.lockutils [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] Acquiring lock "refresh_cache-de0d43a3-122f-43de-9992-e30d2954408f" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.407514] env[61570]: DEBUG nova.network.neutron [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Updating instance_info_cache with network_info: [{"id": "059b121c-c625-44ea-bb5b-8c0b270a9cfc", "address": "fa:16:3e:ec:53:03", "network": {"id": "8bb7b3f1-a4fe-4ed2-9a81-8b5a7894810d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-409987000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e11c8be84d5d443f9d287ef7c345fe9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap059b121c-c6", "ovs_interfaceid": "059b121c-c625-44ea-bb5b-8c0b270a9cfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.421497] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Releasing lock "refresh_cache-de0d43a3-122f-43de-9992-e30d2954408f" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.421843] env[61570]: DEBUG nova.compute.manager [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Instance network_info: |[{"id": "059b121c-c625-44ea-bb5b-8c0b270a9cfc", "address": 
"fa:16:3e:ec:53:03", "network": {"id": "8bb7b3f1-a4fe-4ed2-9a81-8b5a7894810d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-409987000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e11c8be84d5d443f9d287ef7c345fe9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap059b121c-c6", "ovs_interfaceid": "059b121c-c625-44ea-bb5b-8c0b270a9cfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2033.422253] env[61570]: DEBUG oslo_concurrency.lockutils [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] Acquired lock "refresh_cache-de0d43a3-122f-43de-9992-e30d2954408f" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.422428] env[61570]: DEBUG nova.network.neutron [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Refreshing network info cache for port 059b121c-c625-44ea-bb5b-8c0b270a9cfc {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2033.423539] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:53:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '059b121c-c625-44ea-bb5b-8c0b270a9cfc', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2033.431230] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Creating folder: Project (e11c8be84d5d443f9d287ef7c345fe9a). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2033.434982] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ba49d10-784f-4552-94b4-da7b399de689 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.445818] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Created folder: Project (e11c8be84d5d443f9d287ef7c345fe9a) in parent group-v953072. 
[ 2033.445987] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Creating folder: Instances. Parent ref: group-v953184. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2033.446253] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e27d256-86dd-4591-a88c-909257d253a5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.455148] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Created folder: Instances in parent group-v953184. [ 2033.455407] env[61570]: DEBUG oslo.service.loopingcall [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.455592] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2033.455797] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96bf91c0-e3ae-4574-871f-ce277011859e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.479074] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2033.479074] env[61570]: value = "task-4891459" [ 2033.479074] env[61570]: _type = "Task" [ 2033.479074] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.486866] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891459, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.779575] env[61570]: DEBUG nova.network.neutron [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Updated VIF entry in instance network info cache for port 059b121c-c625-44ea-bb5b-8c0b270a9cfc. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2033.780081] env[61570]: DEBUG nova.network.neutron [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Updating instance_info_cache with network_info: [{"id": "059b121c-c625-44ea-bb5b-8c0b270a9cfc", "address": "fa:16:3e:ec:53:03", "network": {"id": "8bb7b3f1-a4fe-4ed2-9a81-8b5a7894810d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-409987000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e11c8be84d5d443f9d287ef7c345fe9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap059b121c-c6", "ovs_interfaceid": "059b121c-c625-44ea-bb5b-8c0b270a9cfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.789658] env[61570]: DEBUG oslo_concurrency.lockutils [req-decb3b79-9061-4d98-84b1-0c016ddc8c78 req-38eaceda-9088-42d9-bbaf-f6ea73568b12 service nova] Releasing lock "refresh_cache-de0d43a3-122f-43de-9992-e30d2954408f" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.990229] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891459, 'name': CreateVM_Task, 'duration_secs': 0.304747} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.990396] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2033.991031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.991199] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.991528] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2033.991785] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dbb6fce-054d-4507-9533-06c7f2b3dd7c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.996769] env[61570]: DEBUG oslo_vmware.api [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Waiting for the task: (returnval){ [ 2033.996769] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]522c8599-cb28-6a3d-c63c-65819271608d" [ 2033.996769] env[61570]: _type = "Task" [ 2033.996769] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.004347] env[61570]: DEBUG oslo_vmware.api [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]522c8599-cb28-6a3d-c63c-65819271608d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.507365] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.507674] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2034.507813] env[61570]: DEBUG oslo_concurrency.lockutils [None req-a60ca8f7-7664-4214-9d52-78b1a60e29bf tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.763084] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2035.763084] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances with incomplete migration {{(pid=61570) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 2047.260535] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "345f407a-879d-4c87-810a-fbad1b1d4c07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.260904] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Lock "345f407a-879d-4c87-810a-fbad1b1d4c07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.404340] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "ece33fed-2e33-4876-83b5-7618968faa12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.404597] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Lock 
"ece33fed-2e33-4876-83b5-7618968faa12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.763065] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2074.775787] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.776026] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.776201] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.776356] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2074.777489] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815d968a-30d5-4c51-bdae-f66c7d22a101 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.786293] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b89eb8-0c97-491e-b3ea-e11602aac887 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.801473] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f832f2d-b51d-4939-aafa-d590f612e776 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.808188] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f256811e-a39e-44b3-8a8d-45d343d861b3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.836661] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180600MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2074.836836] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.836991] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.920469] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.920644] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 1b211472-f426-4e7f-8f7a-70564c84e59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.920778] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.920900] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.921027] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.921155] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.921272] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.921390] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.921509] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.921622] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2074.933285] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2fde6764-2bfe-4634-b371-91bc1a5e38e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2074.943754] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 345f407a-879d-4c87-810a-fbad1b1d4c07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2074.954647] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ece33fed-2e33-4876-83b5-7618968faa12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2074.954879] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2074.955055] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '83', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '9', 'num_os_type_None': '10', 'num_proj_35794e305c9f4380b941db6b873ec99c': '2', 'io_workload': '10', 'num_proj_34fecdc3cc7f47fdba241831e5f27f53': '1', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1', 'num_task_spawning': '1', 'num_proj_e11c8be84d5d443f9d287ef7c345fe9a': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2074.971631] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing inventories for resource provider 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2074.985963] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating ProviderTree inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2074.986175] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2074.998193] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing aggregate associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, aggregates: None {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2075.016910] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Refreshing trait associations for resource provider 
829dc000-b508-440d-ae59-f7cfbca90113, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2075.176029] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0902d07f-8c3d-4d10-ad19-aac7e2bc33da {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.183504] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830d2c16-070f-4345-8daa-8947779b5239 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.213700] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9d3986-3f90-4af4-9f41-60a990b94edb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.221280] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7715b1-c9b8-4705-9c87-66142959aa4e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.234980] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2075.244057] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2075.263341] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2075.263551] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.427s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.254194] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.254619] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2076.754120] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.754395] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.753552] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.753913] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.216133] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7db27aac-5fab-4a6e-8c20-1c9397fd4c86 tempest-InstanceActionsNegativeTestJSON-152159239 tempest-InstanceActionsNegativeTestJSON-152159239-project-member] Acquiring lock "de0d43a3-122f-43de-9992-e30d2954408f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.263864] env[61570]: WARNING oslo_vmware.rw_handles [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2079.263864] env[61570]: ERROR oslo_vmware.rw_handles [ 2079.263864] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Downloaded image file data 
64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2079.266202] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2079.266455] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Copying Virtual Disk [datastore2] vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/f5670de3-0908-46f8-866e-d9d1fd3678a6/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2079.266732] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5984859d-e0d0-4514-bd5c-e210782b002c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.277837] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for the task: (returnval){ [ 2079.277837] env[61570]: value = "task-4891460" [ 2079.277837] env[61570]: _type = "Task" [ 2079.277837] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.286627] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Task: {'id': task-4891460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.753857] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.754266] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2079.754266] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2079.776440] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.776597] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.776725] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.776858] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.776979] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.777118] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.777233] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.777350] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.777466] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.777581] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2079.777704] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2079.788128] env[61570]: DEBUG oslo_vmware.exceptions [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2079.788394] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.788927] env[61570]: ERROR nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2079.788927] env[61570]: Faults: ['InvalidArgument'] [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Traceback (most recent call last): [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] yield resources [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self.driver.spawn(context, instance, image_meta, [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self._fetch_image_if_missing(context, vi) [ 2079.788927] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] image_cache(vi, tmp_image_ds_loc) [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] vm_util.copy_virtual_disk( [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] session._wait_for_task(vmdk_copy_task) [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] return self.wait_for_task(task_ref) [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] return evt.wait() [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] result = hub.switch() [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2079.789299] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] return self.greenlet.switch() [ 2079.789646] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2079.789646] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self.f(*self.args, **self.kw) [ 2079.789646] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2079.789646] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] raise exceptions.translate_fault(task_info.error) [ 2079.789646] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2079.789646] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Faults: ['InvalidArgument'] [ 2079.789646] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] [ 2079.789646] env[61570]: INFO nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Terminating instance [ 2079.790723] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.790925] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2079.791183] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66efe1dc-375a-4161-a3a8-5ac1f83ce739 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.793611] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2079.793795] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2079.794528] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657126c4-9cf4-46af-ba11-6983e1b7d1b0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.801553] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2079.801778] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3efb2c2e-1dbf-4766-8e11-3dc559e08c11 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.804075] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2079.804252] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2079.805221] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0789d9cb-691e-4e12-8861-d5fd6ea474d3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.810388] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 2079.810388] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]529aac1a-8653-0cb9-63ab-14a16106feb1" [ 2079.810388] env[61570]: _type = "Task" [ 2079.810388] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.819870] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]529aac1a-8653-0cb9-63ab-14a16106feb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.880259] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2079.880623] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2079.880941] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Deleting the datastore file [datastore2] efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2079.881245] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c25daf6-bb61-4261-827f-f4bfab264441 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.889877] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for the task: (returnval){ [ 2079.889877] env[61570]: value = "task-4891462" [ 2079.889877] env[61570]: _type = "Task" [ 2079.889877] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.900164] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Task: {'id': task-4891462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.320772] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2080.321043] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating directory with path [datastore2] vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2080.323144] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11bad72f-99e9-483e-a746-7512f1066e6e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.332490] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Created directory with path [datastore2] vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2080.332673] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Fetch image to [datastore2] vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2080.332840] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2080.333585] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a07dee-3bfd-4f82-aa1a-36b0e2116955 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.341062] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46fba3a-7d09-41cb-9bd6-ce796e9a7fa7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.349930] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a9418a-cdc7-47f7-a5b9-00aff0f5bb3f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.382498] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-15a81192-d400-4daf-beae-2607bfe19621 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.389102] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d6db3c61-f539-40b2-9321-d28a6170a1c1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.398616] env[61570]: DEBUG oslo_vmware.api [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Task: {'id': task-4891462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07045} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.398859] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2080.399054] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2080.399231] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2080.399405] env[61570]: INFO nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Took 0.61 seconds to destroy the instance on the hypervisor. 
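Editorial aside (not part of the captured log): the records in this section follow a fixed oslo.vmware polling pattern -- a task is created, "progress is 0%" lines appear while the task is polled, and completion is logged as Task: {'id': ..., 'name': <X>_Task, 'duration_secs': <s>} completed successfully, alongside oslo_concurrency lockutils lines reporting how long each lock was held. The minimal sketch below is a hypothetical helper for summarising those two kinds of records from a saved copy of this log; the file name "nova-compute.log", the function names, and the summary format are assumptions for illustration, not anything defined by Nova or oslo.

#!/usr/bin/env python3
# Hypothetical helper (editorial illustration only): summarise VMware task
# completions and lock hold times from a saved copy of the log above.
import re
from collections import defaultdict

# Matches e.g. "Task: {'id': task-4891462, 'name': DeleteDatastoreFile_Task,
# 'duration_secs': 0.07045} completed successfully."
TASK_RE = re.compile(
    r"Task: \{'id': [^,]+, 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully"
)
# Matches e.g. 'Lock "compute_resources" "released" by "..." :: held 0.427s'
LOCK_RE = re.compile(
    r'Lock "(?P<lock>[^"]+)" "released" by "(?P<caller>[^"]+)" :: held (?P<secs>[\d.]+)s'
)

def summarize(log_text: str) -> None:
    task_times = defaultdict(list)   # task name -> list of durations (seconds)
    lock_times = defaultdict(list)   # lock name -> list of hold times (seconds)
    for line in log_text.splitlines():
        m = TASK_RE.search(line)
        if m:
            task_times[m.group("name")].append(float(m.group("secs")))
        m = LOCK_RE.search(line)
        if m:
            lock_times[m.group("lock")].append(float(m.group("secs")))
    for name, secs in sorted(task_times.items()):
        print(f"{name}: n={len(secs)} max={max(secs):.3f}s")
    for lock, secs in sorted(lock_times.items()):
        print(f'lock "{lock}": n={len(secs)} max={max(secs):.3f}s')

if __name__ == "__main__":
    # Assumed path to a saved copy of this log; adjust as needed.
    with open("nova-compute.log") as fh:
        summarize(fh.read())

Run against this section, such a summary would, for example, report CreateVM_Task and DeleteDatastoreFile_Task completions well under a second while flagging the multi-minute "efe62f13-...-events" style lock waits visible further below, which is typically the first thing worth checking when a tempest build is rescheduled.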
[ 2080.403370] env[61570]: DEBUG nova.compute.claims [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2080.403524] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.403737] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.416041] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2080.590829] env[61570]: DEBUG oslo_vmware.rw_handles [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2080.650835] env[61570]: DEBUG oslo_vmware.rw_handles [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2080.651119] env[61570]: DEBUG oslo_vmware.rw_handles [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2080.686498] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48c83ae-8c4c-427c-8102-3512798d5617 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.694100] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be01db90-c447-418b-b8f5-874ef34fd805 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.726725] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027b946a-ef5a-4fec-80ac-e106b4991c0f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.734506] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67fed8c6-3b22-48ae-b033-962bf2cf0dc1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.748144] env[61570]: DEBUG nova.compute.provider_tree [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2080.759056] env[61570]: DEBUG nova.scheduler.client.report [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2080.772660] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.775591] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.372s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.776192] env[61570]: ERROR nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2080.776192] env[61570]: Faults: ['InvalidArgument'] [ 2080.776192] env[61570]: ERROR nova.compute.manager 
[instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Traceback (most recent call last): [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self.driver.spawn(context, instance, image_meta, [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self._fetch_image_if_missing(context, vi) [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] image_cache(vi, tmp_image_ds_loc) [ 2080.776192] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] vm_util.copy_virtual_disk( [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] session._wait_for_task(vmdk_copy_task) [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] return self.wait_for_task(task_ref) [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] return evt.wait() [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] result = hub.switch() [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] return self.greenlet.switch() [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2080.776505] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] self.f(*self.args, **self.kw) [ 2080.776807] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2080.776807] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] raise exceptions.translate_fault(task_info.error) [ 2080.776807] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2080.776807] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Faults: ['InvalidArgument'] [ 2080.776807] env[61570]: ERROR nova.compute.manager [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] [ 2080.776949] env[61570]: DEBUG nova.compute.utils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2080.778689] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Build of instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc was re-scheduled: A specified parameter was not correct: fileType [ 2080.778689] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2080.779088] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2080.779262] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2080.779431] env[61570]: DEBUG nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2080.779594] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2081.127322] env[61570]: DEBUG nova.network.neutron [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.141940] env[61570]: INFO nova.compute.manager [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Took 0.36 seconds to deallocate network for instance. [ 2081.248664] env[61570]: INFO nova.scheduler.client.report [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Deleted allocations for instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc [ 2081.273508] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f0a81b46-106e-4d01-aac0-75ec49fb7688 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 671.816s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.274766] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 475.147s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.274993] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Acquiring lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.275224] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.275400] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.277735] env[61570]: INFO nova.compute.manager [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Terminating instance [ 2081.279656] env[61570]: DEBUG nova.compute.manager [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2081.279957] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2081.280710] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cbe1bcc4-a41b-413c-bb4a-bced71649638 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.286732] env[61570]: DEBUG nova.compute.manager [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2081.294449] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2981e679-f70c-48a4-8c83-933ca8b175bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.328438] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc could not be found. [ 2081.328646] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2081.328829] env[61570]: INFO nova.compute.manager [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 2081.329086] env[61570]: DEBUG oslo.service.loopingcall [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2081.331552] env[61570]: DEBUG nova.compute.manager [-] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2081.331658] env[61570]: DEBUG nova.network.neutron [-] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2081.346661] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.346840] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.348389] env[61570]: INFO nova.compute.claims [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2081.361269] env[61570]: DEBUG nova.network.neutron [-] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.372910] env[61570]: INFO nova.compute.manager [-] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] Took 0.04 seconds to deallocate network for instance. [ 2081.476217] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5feb25d4-f821-4c84-b99a-c25653ea3ec0 tempest-DeleteServersTestJSON-2147403172 tempest-DeleteServersTestJSON-2147403172-project-member] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.477668] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 356.268s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.477668] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2081.477668] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "efe62f13-5ee1-4dcf-a9e1-3afe90efa4dc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.589211] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b609e1-d6cf-402a-b80f-ce872067947c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.597765] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d33d578-9860-43d0-864c-3fcf8c4a8146 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.630793] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8abb1ac-df53-4194-bd72-3f434d9c11ac {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.638989] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dea405-617d-4784-90e1-83fe6b565edd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.654279] env[61570]: DEBUG nova.compute.provider_tree [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2081.663849] env[61570]: DEBUG nova.scheduler.client.report [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2081.679642] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.333s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.680162] env[61570]: DEBUG nova.compute.manager [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2081.715821] env[61570]: DEBUG nova.compute.utils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2081.717830] env[61570]: DEBUG nova.compute.manager [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2081.718033] env[61570]: DEBUG nova.network.neutron [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2081.749128] env[61570]: DEBUG nova.compute.manager [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2081.796930] env[61570]: DEBUG nova.policy [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a7a7f44b98e4eacb56d11d43dc3cad4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '743eeefd02e04e63850742fc5590125f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 2081.828851] env[61570]: DEBUG nova.compute.manager [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2081.856127] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2081.856652] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2081.856936] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2081.858867] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2081.858867] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2081.858867] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2081.858867] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2081.858867] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2081.859107] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 
tempest-ServersTestJSON-516366677-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2081.859107] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2081.859107] env[61570]: DEBUG nova.virt.hardware [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2081.859285] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e146ba-8fc1-454a-8aac-bdc254650c57 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.868592] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6edebc5-aa2c-4f61-a935-7cdba1aedce6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.127146] env[61570]: DEBUG nova.network.neutron [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Successfully created port: 08ae8d46-64e6-4289-a3c8-38abb2786818 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2082.753270] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.886234] env[61570]: DEBUG nova.network.neutron [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Successfully updated port: 08ae8d46-64e6-4289-a3c8-38abb2786818 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2082.900631] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "refresh_cache-2fde6764-2bfe-4634-b371-91bc1a5e38e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.901482] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "refresh_cache-2fde6764-2bfe-4634-b371-91bc1a5e38e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.901482] env[61570]: DEBUG nova.network.neutron [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2082.942394] env[61570]: 
DEBUG nova.network.neutron [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2083.120771] env[61570]: DEBUG nova.network.neutron [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Updating instance_info_cache with network_info: [{"id": "08ae8d46-64e6-4289-a3c8-38abb2786818", "address": "fa:16:3e:ef:aa:b7", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ae8d46-64", "ovs_interfaceid": "08ae8d46-64e6-4289-a3c8-38abb2786818", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.133706] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "refresh_cache-2fde6764-2bfe-4634-b371-91bc1a5e38e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.134046] env[61570]: DEBUG nova.compute.manager [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Instance network_info: |[{"id": "08ae8d46-64e6-4289-a3c8-38abb2786818", "address": "fa:16:3e:ef:aa:b7", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ae8d46-64", "ovs_interfaceid": "08ae8d46-64e6-4289-a3c8-38abb2786818", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2083.134603] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:aa:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08ae8d46-64e6-4289-a3c8-38abb2786818', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2083.142607] env[61570]: DEBUG oslo.service.loopingcall [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2083.143136] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2083.143378] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15e4d725-0a90-4ad5-a440-864ee854d8ac {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.166846] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2083.166846] env[61570]: value = "task-4891463" [ 2083.166846] env[61570]: _type = "Task" [ 2083.166846] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.168797] env[61570]: DEBUG nova.compute.manager [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Received event network-vif-plugged-08ae8d46-64e6-4289-a3c8-38abb2786818 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2083.169030] env[61570]: DEBUG oslo_concurrency.lockutils [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] Acquiring lock "2fde6764-2bfe-4634-b371-91bc1a5e38e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.169310] env[61570]: DEBUG oslo_concurrency.lockutils [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] Lock "2fde6764-2bfe-4634-b371-91bc1a5e38e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.169624] env[61570]: DEBUG oslo_concurrency.lockutils [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] Lock "2fde6764-2bfe-4634-b371-91bc1a5e38e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.169813] env[61570]: DEBUG nova.compute.manager [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] No waiting events found dispatching network-vif-plugged-08ae8d46-64e6-4289-a3c8-38abb2786818 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2083.169984] env[61570]: WARNING nova.compute.manager [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Received unexpected event network-vif-plugged-08ae8d46-64e6-4289-a3c8-38abb2786818 for instance with vm_state building and task_state spawning. [ 2083.170159] env[61570]: DEBUG nova.compute.manager [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Received event network-changed-08ae8d46-64e6-4289-a3c8-38abb2786818 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2083.170310] env[61570]: DEBUG nova.compute.manager [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Refreshing instance network info cache due to event network-changed-08ae8d46-64e6-4289-a3c8-38abb2786818. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2083.170494] env[61570]: DEBUG oslo_concurrency.lockutils [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] Acquiring lock "refresh_cache-2fde6764-2bfe-4634-b371-91bc1a5e38e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.170629] env[61570]: DEBUG oslo_concurrency.lockutils [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] Acquired lock "refresh_cache-2fde6764-2bfe-4634-b371-91bc1a5e38e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.170818] env[61570]: DEBUG nova.network.neutron [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Refreshing network info cache for port 08ae8d46-64e6-4289-a3c8-38abb2786818 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2083.181856] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891463, 'name': CreateVM_Task} progress is 5%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.678447] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891463, 'name': CreateVM_Task, 'duration_secs': 0.323899} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.678653] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2083.679300] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.679465] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.679786] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2083.680054] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e75119a0-8547-4773-91a0-df13a6acfdc7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.685130] env[61570]: DEBUG oslo_vmware.api [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Waiting for the task: (returnval){ [ 2083.685130] env[61570]: value = 
"session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52852c3f-51e1-8a7f-d8f6-b5b35124d788" [ 2083.685130] env[61570]: _type = "Task" [ 2083.685130] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.693143] env[61570]: DEBUG oslo_vmware.api [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52852c3f-51e1-8a7f-d8f6-b5b35124d788, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.780095] env[61570]: DEBUG nova.network.neutron [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Updated VIF entry in instance network info cache for port 08ae8d46-64e6-4289-a3c8-38abb2786818. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2083.780625] env[61570]: DEBUG nova.network.neutron [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Updating instance_info_cache with network_info: [{"id": "08ae8d46-64e6-4289-a3c8-38abb2786818", "address": "fa:16:3e:ef:aa:b7", "network": {"id": "66501ed1-c2fb-45ce-8581-9deb314c8bc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1394307089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "743eeefd02e04e63850742fc5590125f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ae8d46-64", "ovs_interfaceid": "08ae8d46-64e6-4289-a3c8-38abb2786818", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.790544] env[61570]: DEBUG oslo_concurrency.lockutils [req-78773956-703c-4643-b339-f41bf939d2f9 req-055a63c7-a23f-4624-b400-ff3cb3e390e7 service nova] Releasing lock "refresh_cache-2fde6764-2bfe-4634-b371-91bc1a5e38e4" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.198141] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.198141] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] 
Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2084.198141] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ac09f76a-fe2a-4579-a6f7-147958d23e5e tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2086.750040] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2130.008483] env[61570]: WARNING oslo_vmware.rw_handles [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2130.008483] env[61570]: ERROR oslo_vmware.rw_handles [ 2130.009199] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2130.010797] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2130.011090] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/9ff61b2c-d3b4-4b06-8462-5e3e8a1ce1e9/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2130.011551] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b1afcf5-2b59-4fc4-ab97-b450a1178661 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.020593] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 2130.020593] env[61570]: value = "task-4891464" [ 2130.020593] env[61570]: _type = "Task" [ 2130.020593] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.029534] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891464, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.532062] env[61570]: DEBUG oslo_vmware.exceptions [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2130.532062] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.532438] env[61570]: ERROR nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2130.532438] env[61570]: Faults: ['InvalidArgument'] [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Traceback (most recent call last): [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] yield resources [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self.driver.spawn(context, instance, image_meta, [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] 
File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self._fetch_image_if_missing(context, vi) [ 2130.532438] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] image_cache(vi, tmp_image_ds_loc) [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] vm_util.copy_virtual_disk( [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] session._wait_for_task(vmdk_copy_task) [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] return self.wait_for_task(task_ref) [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] return evt.wait() [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] result = hub.switch() [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2130.532818] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] return self.greenlet.switch() [ 2130.533196] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2130.533196] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self.f(*self.args, **self.kw) [ 2130.533196] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2130.533196] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] raise 
exceptions.translate_fault(task_info.error) [ 2130.533196] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2130.533196] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Faults: ['InvalidArgument'] [ 2130.533196] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] [ 2130.533196] env[61570]: INFO nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Terminating instance [ 2130.534386] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.534696] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2130.534836] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18e45721-6327-409f-8ddd-9ca9b241d5e6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.537289] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2130.537535] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2130.538257] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61507765-c4ca-440d-901d-32e36445f92d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.545293] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2130.545527] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b34e1c9-9ab1-4016-b056-f7753e544724 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.547749] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2130.547921] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2130.548864] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88ecbb3b-586f-4767-bb89-f631d33b2d82 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.553815] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Waiting for the task: (returnval){ [ 2130.553815] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]521d6ebe-ac13-1f78-a146-3aa3fad66157" [ 2130.553815] env[61570]: _type = "Task" [ 2130.553815] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.561015] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]521d6ebe-ac13-1f78-a146-3aa3fad66157, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.621245] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2130.621484] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2130.621649] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleting the datastore file [datastore2] 1b211472-f426-4e7f-8f7a-70564c84e59b {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2130.621965] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16295412-c6f0-4608-8a12-eec4631b8611 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.629020] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for the task: (returnval){ [ 2130.629020] env[61570]: value = "task-4891466" [ 2130.629020] env[61570]: _type = "Task" [ 2130.629020] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.637249] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891466, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.065063] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2131.065436] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Creating directory with path [datastore2] vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2131.065566] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d92befc-dcaa-4e19-a0e7-2b4be05f1c54 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.078069] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Created directory with path [datastore2] vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2131.078289] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Fetch image to [datastore2] vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2131.078460] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2131.079305] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2969d974-8614-4798-997e-69ef013e622b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.088700] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2340ff-bfd9-4d1c-bfa5-521708cb9c1d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.098874] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbc39dd-7863-48e6-a18e-caa8911329dd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.133614] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad76b302-093e-4513-8eae-be2840fa3818 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.143950] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e9335113-40ff-451e-b738-256ca1495460 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.146164] env[61570]: DEBUG oslo_vmware.api [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Task: {'id': task-4891466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07724} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.146876] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2131.146876] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2131.146876] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2131.147082] env[61570]: INFO nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2131.149614] env[61570]: DEBUG nova.compute.claims [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2131.149814] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.150061] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.172914] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2131.243409] env[61570]: DEBUG oslo_vmware.rw_handles [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2131.314097] env[61570]: DEBUG oslo_vmware.rw_handles [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2131.314474] env[61570]: DEBUG oslo_vmware.rw_handles [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2131.425621] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95f670b-1665-4d38-86d6-d471d02b2393 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.433599] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f60c68-6444-49f8-a004-71fdff84c957 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.464437] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d929935e-911d-4280-b570-eb1c8c66a06c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.472247] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2f691b-230a-4374-a913-4dd4adbf98be {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.485828] env[61570]: DEBUG nova.compute.provider_tree [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2131.494728] env[61570]: DEBUG nova.scheduler.client.report [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2131.510869] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.361s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.511435] env[61570]: ERROR nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2131.511435] env[61570]: Faults: ['InvalidArgument'] [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Traceback (most recent call last): [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2131.511435] 
env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self.driver.spawn(context, instance, image_meta, [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self._fetch_image_if_missing(context, vi) [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] image_cache(vi, tmp_image_ds_loc) [ 2131.511435] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] vm_util.copy_virtual_disk( [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] session._wait_for_task(vmdk_copy_task) [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] return self.wait_for_task(task_ref) [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] return evt.wait() [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] result = hub.switch() [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] return self.greenlet.switch() [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2131.511922] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] self.f(*self.args, **self.kw) [ 2131.512455] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2131.512455] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] raise exceptions.translate_fault(task_info.error) [ 2131.512455] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2131.512455] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Faults: ['InvalidArgument'] [ 2131.512455] env[61570]: ERROR nova.compute.manager [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] [ 2131.512455] env[61570]: DEBUG nova.compute.utils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2131.513662] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Build of instance 1b211472-f426-4e7f-8f7a-70564c84e59b was re-scheduled: A specified parameter was not correct: fileType [ 2131.513662] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2131.514035] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2131.514211] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2131.514381] env[61570]: DEBUG nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2131.514538] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2131.910400] env[61570]: DEBUG nova.network.neutron [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.925193] env[61570]: INFO nova.compute.manager [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Took 0.41 seconds to deallocate network for instance. [ 2132.032573] env[61570]: INFO nova.scheduler.client.report [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Deleted allocations for instance 1b211472-f426-4e7f-8f7a-70564c84e59b [ 2132.057657] env[61570]: DEBUG oslo_concurrency.lockutils [None req-cb1a6aff-d7ad-43a4-8bd6-c29e89022b03 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 645.639s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.059133] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 450.010s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.059247] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Acquiring lock "1b211472-f426-4e7f-8f7a-70564c84e59b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.059383] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.059561] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.061666] env[61570]: INFO nova.compute.manager [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Terminating instance [ 2132.063646] env[61570]: DEBUG nova.compute.manager [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2132.063835] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2132.064341] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb8065d1-dd80-4d2b-a6c1-538a2276cefa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.074195] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906ca400-625f-42f6-9655-8173e4bce8cb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.085257] env[61570]: DEBUG nova.compute.manager [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2132.108228] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1b211472-f426-4e7f-8f7a-70564c84e59b could not be found. 
[ 2132.108395] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2132.108514] env[61570]: INFO nova.compute.manager [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2132.108782] env[61570]: DEBUG oslo.service.loopingcall [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2132.109015] env[61570]: DEBUG nova.compute.manager [-] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2132.109118] env[61570]: DEBUG nova.network.neutron [-] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2132.137625] env[61570]: DEBUG nova.network.neutron [-] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.139438] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.139675] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.141626] env[61570]: INFO nova.compute.claims [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2132.147308] env[61570]: INFO nova.compute.manager [-] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] Took 0.04 seconds to deallocate network for instance. 
[ 2132.231811] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ab66a313-2819-49d1-a1f4-6eed1936f7d9 tempest-ServerDiskConfigTestJSON-645250239 tempest-ServerDiskConfigTestJSON-645250239-project-member] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.173s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.232732] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 407.023s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.232931] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 1b211472-f426-4e7f-8f7a-70564c84e59b] During sync_power_state the instance has a pending task (deleting). Skip. [ 2132.233114] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "1b211472-f426-4e7f-8f7a-70564c84e59b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.326078] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec81348a-86f1-462e-b926-5336f5ed74ce {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.333975] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfc0dcb-27a9-4551-b2d1-6fbf3d149397 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.363990] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82384e3b-39ba-4277-afbd-7a36bd556b9c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.374055] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8772c4-50a1-4d89-b503-e20da8dfd2ca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.386256] env[61570]: DEBUG nova.compute.provider_tree [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2132.395971] env[61570]: DEBUG nova.scheduler.client.report [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 
'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2132.409770] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.410268] env[61570]: DEBUG nova.compute.manager [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2132.445851] env[61570]: DEBUG nova.compute.utils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2132.447349] env[61570]: DEBUG nova.compute.manager [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Not allocating networking since 'none' was specified. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 2132.456867] env[61570]: DEBUG nova.compute.manager [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2132.521293] env[61570]: DEBUG nova.compute.manager [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2132.548714] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2132.548714] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2132.548883] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2132.549143] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2132.549346] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2132.549539] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2132.549783] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2132.549980] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2132.550246] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f 
tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2132.550410] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2132.550616] env[61570]: DEBUG nova.virt.hardware [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2132.551503] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab17017-6382-47e3-abc5-6c46edc6cef5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.560261] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78df992-8299-4371-85ed-d2eabc32283f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.574046] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Instance VIF info [] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2132.579656] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Creating folder: Project (d1c11dd2b9174b0a91c5f74e9410913b). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2132.580013] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3320e77-59c5-4e8b-aa2d-e92713471a9e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.589000] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Created folder: Project (d1c11dd2b9174b0a91c5f74e9410913b) in parent group-v953072. [ 2132.589251] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Creating folder: Instances. Parent ref: group-v953188. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2132.589486] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fe8a27c-3a2f-4072-8977-64862c4403dc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.597797] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Created folder: Instances in parent group-v953188. 
[ 2132.598116] env[61570]: DEBUG oslo.service.loopingcall [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2132.598346] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2132.598605] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29a8cb14-427c-4141-ae16-db2a0f581786 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.615195] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2132.615195] env[61570]: value = "task-4891469" [ 2132.615195] env[61570]: _type = "Task" [ 2132.615195] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.622798] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891469, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.124900] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891469, 'name': CreateVM_Task, 'duration_secs': 0.263044} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.125339] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2133.125632] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2133.125798] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2133.126152] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2133.126404] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8c1709d-2376-4a10-8b11-c17e2d5a0db8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.131169] env[61570]: DEBUG oslo_vmware.api [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Waiting for the task: (returnval){ 
[ 2133.131169] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52631500-3af7-e566-82b8-466ce0fcd376" [ 2133.131169] env[61570]: _type = "Task" [ 2133.131169] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.139329] env[61570]: DEBUG oslo_vmware.api [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52631500-3af7-e566-82b8-466ce0fcd376, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.641912] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.642175] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2133.642393] env[61570]: DEBUG oslo_concurrency.lockutils [None req-35ca2a32-c1d9-4f8c-9fca-bd755527056f tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.752989] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.765520] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.765745] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.765917] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.766094] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2135.767325] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbe638b-388c-4c7c-a48a-9827e6c0a505 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.776398] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c67bba9-0666-4bb3-b530-4b3f4a71fe36 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.790904] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd6f6d4-3410-43b7-9e6d-e12cc158d880 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.798442] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7d557b-878a-49ce-9d8b-de0f3e689352 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.830566] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180601MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2135.830746] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.830921] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.912351] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.912519] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.912647] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.912769] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.912888] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.913078] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.913232] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.913353] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.913468] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2fde6764-2bfe-4634-b371-91bc1a5e38e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.913581] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 345f407a-879d-4c87-810a-fbad1b1d4c07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2135.925475] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ece33fed-2e33-4876-83b5-7618968faa12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2135.925725] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2135.925912] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '85', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_9c61bebda38b4bd4b5d1d1a2068c49ba': '1', 'io_workload': '10', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_e11c8be84d5d443f9d287ef7c345fe9a': '1', 'num_task_spawning': '2', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_proj_d1c11dd2b9174b0a91c5f74e9410913b': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2136.068753] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddabef8-f6f4-4679-8e13-b81eb50e2d61 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.076581] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04862a9c-b888-461b-b5c9-763d15d1a951 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.106439] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d680e0-58a9-4b02-83e9-820eb63bb1ad {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.114443] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0915bc4-0104-4ccd-a81d-a7f3d5c05b1a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.130498] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2136.139907] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2136.158273] env[61570]: DEBUG 
nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2136.158490] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.328s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.159268] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2137.159618] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2137.159618] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2138.753059] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2139.753921] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2139.753921] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.753740] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.753915] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2140.754971] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2140.774511] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.774667] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.774799] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.774925] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.775062] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.775208] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.775365] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.775501] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.775622] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.775737] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2140.775858] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2144.753704] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.717012] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquiring lock "9a6bb63e-ea38-476e-8597-aba1d55ed5f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.717012] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Lock "9a6bb63e-ea38-476e-8597-aba1d55ed5f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.748928] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2177.439468] env[61570]: DEBUG oslo_concurrency.lockutils [None req-36d99e19-4c5e-4d4e-bb4d-1140ce2d3107 tempest-ServersTestJSON-516366677 tempest-ServersTestJSON-516366677-project-member] Acquiring lock "2fde6764-2bfe-4634-b371-91bc1a5e38e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.176688] env[61570]: WARNING oslo_vmware.rw_handles [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2179.176688] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2179.176688] env[61570]: 
ERROR oslo_vmware.rw_handles [ 2179.177442] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2179.179028] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2179.179281] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Copying Virtual Disk [datastore2] vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/e252893a-ea28-4a32-8e23-9797c1840f02/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2179.179565] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0effce21-9b2c-41a7-a7a6-d3f6c2bb7f64 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.188435] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Waiting for the task: (returnval){ [ 2179.188435] env[61570]: value = "task-4891470" [ 2179.188435] env[61570]: _type = "Task" [ 2179.188435] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.196725] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Task: {'id': task-4891470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.699036] env[61570]: DEBUG oslo_vmware.exceptions [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2179.699320] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.699862] env[61570]: ERROR nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.699862] env[61570]: Faults: ['InvalidArgument'] [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Traceback (most recent call last): [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] yield resources [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self.driver.spawn(context, instance, image_meta, [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._fetch_image_if_missing(context, vi) [ 2179.699862] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] image_cache(vi, tmp_image_ds_loc) [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] vm_util.copy_virtual_disk( [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] session._wait_for_task(vmdk_copy_task) [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.wait_for_task(task_ref) [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return evt.wait() [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] result = hub.switch() [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2179.700287] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.greenlet.switch() [ 2179.701130] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2179.701130] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self.f(*self.args, **self.kw) [ 2179.701130] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2179.701130] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] raise exceptions.translate_fault(task_info.error) [ 2179.701130] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.701130] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Faults: ['InvalidArgument'] [ 2179.701130] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] [ 2179.701130] env[61570]: INFO nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Terminating instance [ 2179.701758] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2179.701976] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2179.702217] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e354839b-e951-4559-9066-527bc207ae6b {{(pid=61570) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.704382] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2179.704734] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquired lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2179.704734] env[61570]: DEBUG nova.network.neutron [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2179.712338] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2179.712519] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2179.713790] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c79c114-4ab7-42a8-8eb7-6935afa0771b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.722020] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Waiting for the task: (returnval){ [ 2179.722020] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52a42134-e636-82e1-4e0e-7e46f43eeb0e" [ 2179.722020] env[61570]: _type = "Task" [ 2179.722020] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.732149] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52a42134-e636-82e1-4e0e-7e46f43eeb0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.821404] env[61570]: DEBUG nova.network.neutron [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2179.892411] env[61570]: DEBUG nova.network.neutron [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.904827] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Releasing lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.905261] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2179.905499] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2179.906647] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff7d62c-6770-4f1c-8176-8acbd0bcf358 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.914557] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2179.914789] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-300a3823-ecde-465f-a189-a0a75dda90bf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.945997] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2179.946282] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2179.946556] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Deleting the datastore file [datastore2] 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 {{(pid=61570) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2179.946848] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-739018c7-c1ca-4594-b4b0-c6e7b50eb723 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.953609] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Waiting for the task: (returnval){ [ 2179.953609] env[61570]: value = "task-4891472" [ 2179.953609] env[61570]: _type = "Task" [ 2179.953609] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.961854] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Task: {'id': task-4891472, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.232707] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2180.233085] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Creating directory with path [datastore2] vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2180.233264] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f68b5cd5-bb15-416d-a022-f3765bf084a8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.244798] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Created directory with path [datastore2] vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2180.245017] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Fetch image to [datastore2] vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2180.245195] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] 
vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2180.245954] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc3b543-28d7-4f48-94df-de85d5fdf126 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.253229] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3eecc1-543f-4c61-91f5-ca1e23ae2ab8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.263414] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0069ceb8-17c5-48a9-a4a2-4aba009f31c1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.296114] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3097da89-bb6d-4db7-8360-a9a3765ac343 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.302511] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aa34681d-d497-4fd4-ad45-a953c5d7320c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.326891] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2180.464432] env[61570]: DEBUG oslo_vmware.api [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Task: {'id': task-4891472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.046155} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.466297] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2180.466488] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2180.466682] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2180.466855] env[61570]: INFO nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Took 0.56 seconds to destroy the instance on the hypervisor. [ 2180.467104] env[61570]: DEBUG oslo.service.loopingcall [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2180.467473] env[61570]: DEBUG nova.compute.manager [-] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2180.469544] env[61570]: DEBUG nova.compute.claims [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2180.469714] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2180.469944] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.480102] env[61570]: DEBUG oslo_vmware.rw_handles [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2180.541114] env[61570]: DEBUG oslo_vmware.rw_handles [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2180.541310] env[61570]: DEBUG oslo_vmware.rw_handles [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2180.689048] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7327999e-8113-42fe-8f53-a3d3cec274ac {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.696870] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a852d5ae-6c66-47ec-8da8-2f379350745f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.728431] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28375d60-c404-40d4-8310-15b2263bf69b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.737830] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f687e71c-a8f7-4b4c-ae58-389674cbfac1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.751928] env[61570]: DEBUG nova.compute.provider_tree [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2180.784295] env[61570]: DEBUG nova.scheduler.client.report [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2180.803041] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.333s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.803486] env[61570]: ERROR nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2180.803486] env[61570]: Faults: ['InvalidArgument'] [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Traceback (most recent call last): [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2180.803486] env[61570]: ERROR 
nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self.driver.spawn(context, instance, image_meta, [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._fetch_image_if_missing(context, vi) [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] image_cache(vi, tmp_image_ds_loc) [ 2180.803486] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] vm_util.copy_virtual_disk( [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] session._wait_for_task(vmdk_copy_task) [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.wait_for_task(task_ref) [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return evt.wait() [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] result = hub.switch() [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.greenlet.switch() [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2180.803789] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self.f(*self.args, **self.kw) [ 2180.804198] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2180.804198] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] raise exceptions.translate_fault(task_info.error) [ 2180.804198] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2180.804198] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Faults: ['InvalidArgument'] [ 2180.804198] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] [ 2180.804388] env[61570]: DEBUG nova.compute.utils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2180.805851] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Build of instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 was re-scheduled: A specified parameter was not correct: fileType [ 2180.805851] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2180.806241] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2180.806486] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2180.806659] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquired lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2180.806842] env[61570]: DEBUG nova.network.neutron [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2180.833715] env[61570]: DEBUG nova.network.neutron [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2180.908627] env[61570]: DEBUG nova.network.neutron [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.917938] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Releasing lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2180.918205] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2180.918414] env[61570]: DEBUG nova.compute.manager [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Skipping network deallocation for instance since networking was not requested. {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2181.013843] env[61570]: INFO nova.scheduler.client.report [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Deleted allocations for instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 [ 2181.035326] env[61570]: DEBUG oslo_concurrency.lockutils [None req-0d813df8-1d32-451d-9415-80a19b37388d tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.802s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.036662] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 455.827s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.036873] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2181.037074] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.037704] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 442.322s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.037941] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.038199] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.038372] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.040204] env[61570]: INFO nova.compute.manager [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Terminating instance [ 2181.042136] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquiring lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.042295] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Acquired lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2181.042463] env[61570]: DEBUG nova.network.neutron [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Building network info cache for instance 
{{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2181.052133] env[61570]: DEBUG nova.compute.manager [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2181.075996] env[61570]: DEBUG nova.network.neutron [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2181.107020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.107301] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.109265] env[61570]: INFO nova.compute.claims [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2181.149367] env[61570]: DEBUG nova.network.neutron [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2181.158915] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Releasing lock "refresh_cache-20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2181.159380] env[61570]: DEBUG nova.compute.manager [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2181.159615] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2181.160189] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17571b1e-dc4a-4da9-8efd-03fc0457231b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.171702] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83dc79a-161b-4350-928b-f46b2713ea6c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.206807] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5 could not be found. [ 2181.207041] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2181.207224] env[61570]: INFO nova.compute.manager [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2181.207471] env[61570]: DEBUG oslo.service.loopingcall [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2181.210079] env[61570]: DEBUG nova.compute.manager [-] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2181.210185] env[61570]: DEBUG nova.network.neutron [-] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2181.323542] env[61570]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61570) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2181.323776] env[61570]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-11519c40-fd41-461d-b265-9508e4963dc1'] [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2181.324324] env[61570]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2181.324796] env[61570]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2181.325308] env[61570]: ERROR oslo.service.loopingcall [ 2181.325877] env[61570]: ERROR nova.compute.manager [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2181.332425] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42a6a8c-3303-46dc-a0f2-99ed1de030cc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.340710] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f2d03c-9c36-4d97-9d9e-f2eb66744172 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.372583] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4156e6-7c6a-4f89-b985-24bfe9117de4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.376220] env[61570]: ERROR nova.compute.manager [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Traceback (most recent call last): [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] ret = obj(*args, **kwargs) [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] exception_handler_v20(status_code, error_body) [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] raise client_exc(message=error_message, [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Neutron server returns request_ids: ['req-11519c40-fd41-461d-b265-9508e4963dc1'] [ 2181.376220] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] During handling of the above exception, another exception occurred: [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Traceback (most recent call last): [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._delete_instance(context, instance, bdms) [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._shutdown_instance(context, instance, bdms) [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._try_deallocate_network(context, instance, requested_networks) [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] with excutils.save_and_reraise_exception(): [ 2181.376706] env[61570]: ERROR 
nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2181.376706] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self.force_reraise() [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] raise self.value [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] _deallocate_network_with_retries() [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return evt.wait() [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] result = hub.switch() [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.greenlet.switch() [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2181.377089] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] result = func(*self.args, **self.kw) [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] result = f(*args, **kwargs) [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._deallocate_network( [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self.network_api.deallocate_for_instance( [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 
20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] data = neutron.list_ports(**search_opts) [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] ret = obj(*args, **kwargs) [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.list('ports', self.ports_path, retrieve_all, [ 2181.377422] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] ret = obj(*args, **kwargs) [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] for r in self._pagination(collection, path, **params): [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] res = self.get(path, params=params) [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] ret = obj(*args, **kwargs) [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.retry_request("GET", action, body=body, [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] ret = obj(*args, **kwargs) [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2181.377802] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] return self.do_request(method, action, body=body, [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] ret = obj(*args, **kwargs) [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] self._handle_fault_response(status_code, replybody, resp) [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2181.378185] env[61570]: ERROR nova.compute.manager [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] [ 2181.383924] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf6a000-5f7c-43b1-9b7b-fbbf740da954 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.398097] env[61570]: DEBUG nova.compute.provider_tree [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2181.404450] env[61570]: DEBUG oslo_concurrency.lockutils [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Lock "20d75f7f-4fb3-4f59-8488-bb03dba5b0f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.367s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.406829] env[61570]: DEBUG nova.scheduler.client.report [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2181.419957] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.313s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.420468] env[61570]: DEBUG nova.compute.manager [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2181.451480] env[61570]: INFO nova.compute.manager [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] [instance: 20d75f7f-4fb3-4f59-8488-bb03dba5b0f5] Successfully reverted task state from None on failure for instance. [ 2181.455161] env[61570]: DEBUG nova.compute.utils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server [None req-b5b09b90-39c4-4ae6-8c2d-166f3c79eb0f tempest-ServersAaction247Test-152011525 tempest-ServersAaction247Test-152011525-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-11519c40-fd41-461d-b265-9508e4963dc1'] [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2181.457583] env[61570]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2181.458101] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2181.458614] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server raise self.value [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2181.459120] env[61570]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.459609] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2181.460138] env[61570]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2181.460663] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2181.460663] 
env[61570]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2181.460663] env[61570]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2181.460663] env[61570]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2181.460663] env[61570]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2181.460663] env[61570]: ERROR oslo_messaging.rpc.server [ 2181.460663] env[61570]: DEBUG nova.compute.manager [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Not allocating networking since 'none' was specified. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 2181.465192] env[61570]: DEBUG nova.compute.manager [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2181.527990] env[61570]: DEBUG nova.compute.manager [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2181.554585] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2181.554890] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2181.555078] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2181.555268] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 
tempest-ServerShowV247Test-501578296-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2181.555457] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2181.555664] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2181.555903] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2181.556098] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2181.556286] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2181.556464] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2181.556660] env[61570]: DEBUG nova.virt.hardware [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2181.557588] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021a123e-d44a-4015-82cd-4b23e585ea2a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.566220] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831de142-f8d8-4381-ba4e-b535d899b528 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.581412] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Instance VIF info [] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2181.587330] env[61570]: DEBUG oslo.service.loopingcall [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 
tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2181.587545] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2181.587754] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4efcbf7e-4392-4e09-8637-2b7b67a3111c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.605282] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2181.605282] env[61570]: value = "task-4891473" [ 2181.605282] env[61570]: _type = "Task" [ 2181.605282] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.614610] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891473, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.115423] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891473, 'name': CreateVM_Task, 'duration_secs': 0.258969} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.115681] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2182.116211] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2182.116414] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2182.116791] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2182.117107] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78191499-8920-4646-bcbc-49e688858506 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.122177] env[61570]: DEBUG oslo_vmware.api [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Waiting for the task: (returnval){ [ 2182.122177] env[61570]: value = 
"session[520ea153-1f07-f1c7-8903-e00561bb1f4b]521e5389-4e56-9c02-7965-253ba844791d" [ 2182.122177] env[61570]: _type = "Task" [ 2182.122177] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.131161] env[61570]: DEBUG oslo_vmware.api [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]521e5389-4e56-9c02-7965-253ba844791d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.632099] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2182.632455] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2182.632603] env[61570]: DEBUG oslo_concurrency.lockutils [None req-bc251fdc-7465-43f6-a6db-d6d3eed64ae7 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.654686] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Acquiring lock "4c56b397-97b5-4210-9130-0a8769f8f19c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.655097] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Lock "4c56b397-97b5-4210-9130-0a8769f8f19c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2197.752626] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.752905] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.765533] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.765855] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2197.766088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2197.766288] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2197.767957] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821b75b0-0a8d-45d9-a475-927948c26235 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.777957] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35987fec-26b6-473e-b6ba-815f82a9a6a2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.795927] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2150fc14-663b-467d-adca-53a87290273f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.803390] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b86bed-46c3-49ba-b40d-482274edae8c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.833410] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180576MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2197.833571] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.833779] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2197.913705] env[61570]: DEBUG nova.compute.resource_tracker [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.913897] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914011] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914140] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914254] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914371] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914527] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914583] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2fde6764-2bfe-4634-b371-91bc1a5e38e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914696] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 345f407a-879d-4c87-810a-fbad1b1d4c07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.914808] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ece33fed-2e33-4876-83b5-7618968faa12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2197.926869] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9a6bb63e-ea38-476e-8597-aba1d55ed5f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2197.938519] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 4c56b397-97b5-4210-9130-0a8769f8f19c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2197.938761] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2197.938925] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '86', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_b0a2f1cca5f94645bcee541eb75b23bc': '1', 'io_workload': '10', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_e11c8be84d5d443f9d287ef7c345fe9a': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_task_spawning': '2', 'num_proj_d1c11dd2b9174b0a91c5f74e9410913b': '2'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2198.094933] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c2f183-ab03-4d26-8ff2-ea1a221586b1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.103297] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5490a27e-d4d5-43e1-a39a-47b6e53b6f50 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.132659] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2cb613-2184-40b4-949a-7de483afcd43 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.140846] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b866009-a092-4d4c-98ae-70c92b4633cb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.154014] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2198.162748] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2198.177881] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2198.178122] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.344s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2199.178647] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2199.179047] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2199.753264] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2199.753507] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2200.752843] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2200.753250] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2200.753250] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2200.776442] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.776625] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.776774] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.776916] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.777210] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.777382] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.777509] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.777638] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.777757] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.777867] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2200.778043] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2200.778533] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.773910] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.753738] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2209.748800] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2225.886482] env[61570]: WARNING oslo_vmware.rw_handles [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2225.886482] env[61570]: ERROR oslo_vmware.rw_handles [ 2225.887235] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2225.888883] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2225.889136] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Copying Virtual Disk [datastore2] vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/d0c17a3c-81d1-427a-8b74-6c0db8bb20f4/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2225.889412] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a51c3a3e-639e-4735-8e8b-2fe18971ac8d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.897596] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Waiting for the task: (returnval){ [ 2225.897596] env[61570]: value = "task-4891474" [ 2225.897596] env[61570]: _type = "Task" [ 2225.897596] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.906042] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Task: {'id': task-4891474, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.408478] env[61570]: DEBUG oslo_vmware.exceptions [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2226.408753] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2226.409329] env[61570]: ERROR nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2226.409329] env[61570]: Faults: ['InvalidArgument'] [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Traceback (most recent call last): [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] yield resources [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self.driver.spawn(context, instance, image_meta, [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self._fetch_image_if_missing(context, vi) [ 2226.409329] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] image_cache(vi, tmp_image_ds_loc) [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] vm_util.copy_virtual_disk( [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] session._wait_for_task(vmdk_copy_task) [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] return self.wait_for_task(task_ref) [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] return evt.wait() [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] result = hub.switch() [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2226.409701] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] return self.greenlet.switch() [ 2226.410092] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2226.410092] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self.f(*self.args, **self.kw) [ 2226.410092] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2226.410092] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] raise exceptions.translate_fault(task_info.error) [ 2226.410092] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2226.410092] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Faults: ['InvalidArgument'] [ 2226.410092] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] [ 2226.410092] env[61570]: INFO nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Terminating instance [ 2226.411285] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2226.411496] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 
tempest-AttachVolumeTestJSON-1222939441-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2226.411743] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c75f9a0-55f7-4eae-bbff-4dde20f30fc9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.414147] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2226.414343] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2226.415116] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866fc7c3-eef2-4228-bad7-5741cc419c9a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.422863] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2226.423158] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57daf279-ba1d-447f-9095-c57347c91beb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.425539] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2226.425717] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2226.426772] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77901397-3f7f-4b83-a428-bef840fd6c1a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.432653] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 2226.432653] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f9e437-90c1-9174-70ed-2131a68edc5f" [ 2226.432653] env[61570]: _type = "Task" [ 2226.432653] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.440908] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52f9e437-90c1-9174-70ed-2131a68edc5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.495446] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2226.495687] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2226.495905] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Deleting the datastore file [datastore2] f01923b4-11f1-412e-bc5f-070e0fbb8a6a {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2226.496192] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fddd0f2a-d501-4743-a1d9-f6030e3d9c5b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.503340] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Waiting for the task: (returnval){ [ 2226.503340] env[61570]: value = "task-4891476" [ 2226.503340] env[61570]: _type = "Task" [ 2226.503340] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.511414] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Task: {'id': task-4891476, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.943596] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2226.944053] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Creating directory with path [datastore2] vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2226.944053] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cfbe8094-fe06-4cdf-8b58-0f2bc5181e7f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.955907] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Created directory with path [datastore2] vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2226.956158] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Fetch image to [datastore2] vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2226.956293] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2226.957275] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe850239-9535-45db-9ef5-400f9a024761 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.964019] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757ad2a2-6f21-4593-97e7-35e05213daa5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.973480] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be88b08d-d4f3-4840-a12b-ccf0239174e6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.007754] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64572181-965e-4bfa-b690-53f3bf522073 {{(pid=61570) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.016607] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-066a844d-2040-49d7-bb78-91f00cfd6315 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.018401] env[61570]: DEBUG oslo_vmware.api [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Task: {'id': task-4891476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07977} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.018642] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2227.018824] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2227.019011] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2227.019185] env[61570]: INFO nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2227.021382] env[61570]: DEBUG nova.compute.claims [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2227.021575] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2227.021792] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.040699] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2227.193206] env[61570]: DEBUG oslo_vmware.rw_handles [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2227.252247] env[61570]: DEBUG oslo_vmware.rw_handles [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2227.252461] env[61570]: DEBUG oslo_vmware.rw_handles [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2227.274904] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb752f3-a123-44a9-b7f2-2c782b26adc3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.282808] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc88529-945b-4094-8469-ee766d141fb0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.312149] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69931634-5373-42e5-aba6-2baf68f861cc {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.319804] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8f9ad3-030a-47be-ade8-665e2767dff5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.333893] env[61570]: DEBUG nova.compute.provider_tree [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2227.343626] env[61570]: DEBUG nova.scheduler.client.report [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2227.357411] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.335s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.357930] env[61570]: ERROR nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2227.357930] env[61570]: Faults: ['InvalidArgument'] [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Traceback (most recent call last): [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self.driver.spawn(context, instance, image_meta, [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self._fetch_image_if_missing(context, vi) [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] image_cache(vi, tmp_image_ds_loc) [ 2227.357930] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] vm_util.copy_virtual_disk( [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] session._wait_for_task(vmdk_copy_task) [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] return self.wait_for_task(task_ref) [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] return evt.wait() [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] result = hub.switch() [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] return self.greenlet.switch() [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2227.358298] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] self.f(*self.args, **self.kw) [ 2227.358591] env[61570]: ERROR nova.compute.manager [instance: 
f01923b4-11f1-412e-bc5f-070e0fbb8a6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2227.358591] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] raise exceptions.translate_fault(task_info.error) [ 2227.358591] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2227.358591] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Faults: ['InvalidArgument'] [ 2227.358591] env[61570]: ERROR nova.compute.manager [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] [ 2227.358712] env[61570]: DEBUG nova.compute.utils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2227.360151] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Build of instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a was re-scheduled: A specified parameter was not correct: fileType [ 2227.360151] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2227.360564] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2227.360738] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2227.360910] env[61570]: DEBUG nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2227.361094] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2227.710974] env[61570]: DEBUG nova.network.neutron [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2227.723622] env[61570]: INFO nova.compute.manager [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Took 0.36 seconds to deallocate network for instance. [ 2227.865495] env[61570]: INFO nova.scheduler.client.report [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Deleted allocations for instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a [ 2227.890281] env[61570]: DEBUG oslo_concurrency.lockutils [None req-5927b835-484e-47bd-9668-d9fc41b87ec3 tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 662.962s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.891324] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 467.274s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.891430] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Acquiring lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2227.891594] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock 
"f01923b4-11f1-412e-bc5f-070e0fbb8a6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.891731] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.895744] env[61570]: INFO nova.compute.manager [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Terminating instance [ 2227.897624] env[61570]: DEBUG nova.compute.manager [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2227.897819] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2227.898085] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fffc89a-fa4e-409d-b0e4-ff20899b4717 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.902747] env[61570]: DEBUG nova.compute.manager [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2227.909363] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdccc00-8301-42b8-9d93-bbcc9515a06a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.943032] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f01923b4-11f1-412e-bc5f-070e0fbb8a6a could not be found. 
[ 2227.943032] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2227.943184] env[61570]: INFO nova.compute.manager [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2227.943511] env[61570]: DEBUG oslo.service.loopingcall [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2227.946047] env[61570]: DEBUG nova.compute.manager [-] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2227.946329] env[61570]: DEBUG nova.network.neutron [-] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2227.961913] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2227.962234] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.963727] env[61570]: INFO nova.compute.claims [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2227.976458] env[61570]: DEBUG nova.network.neutron [-] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2227.987072] env[61570]: INFO nova.compute.manager [-] [instance: f01923b4-11f1-412e-bc5f-070e0fbb8a6a] Took 0.04 seconds to deallocate network for instance. 
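The "Claim successful on node domain-c8..." line above is the resource tracker accepting the m1.nano request against the inventory reported just below for provider 829dc000-b508-440d-ae59-f7cfbca90113. A rough sketch of that headroom check, assuming the usual placement-style capacity formula (total - reserved) * allocation_ratio; the figures are copied from the inventory and "Final resource view" lines in this log, the script itself is illustrative:

    # Inventory as reported for the provider in this log.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 200,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    used = {"VCPU": 10, "MEMORY_MB": 1792, "DISK_GB": 10}   # from "Final resource view" further down
    claim = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}     # one m1.nano instance

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        fits = used[rc] + claim[rc] <= capacity
        print(f"{rc}: {used[rc]} + {claim[rc]} <= {capacity} -> {fits}")

Every resource class has ample headroom here, which is consistent with the claim succeeding immediately.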
[ 2228.080926] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9b5b10c0-d1fd-41d4-be0f-3061fba00d4b tempest-ServerRescueNegativeTestJSON-1142714140 tempest-ServerRescueNegativeTestJSON-1142714140-project-member] Lock "f01923b4-11f1-412e-bc5f-070e0fbb8a6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2228.157199] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e839ba0-156e-4936-86f3-e07466ac0698 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.165471] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fb7eec-3674-4ecb-a77e-e866f6220e27 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.194925] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6ce799-a10e-4372-87e1-793ae5ba962c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.202534] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04380fe4-d866-476d-8af8-e56373f92296 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.215740] env[61570]: DEBUG nova.compute.provider_tree [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2228.224911] env[61570]: DEBUG nova.scheduler.client.report [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2228.240254] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.278s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2228.240732] env[61570]: DEBUG nova.compute.manager [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2228.274024] env[61570]: DEBUG nova.compute.utils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2228.276070] env[61570]: DEBUG nova.compute.manager [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2228.276280] env[61570]: DEBUG nova.network.neutron [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2228.285998] env[61570]: DEBUG nova.compute.manager [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2228.342918] env[61570]: DEBUG nova.policy [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e24cb22e501471691413f3304a7d5a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9879c18ea258468bad18ac62d0610b69', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 2228.350738] env[61570]: DEBUG nova.compute.manager [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2228.378218] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2228.378698] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2228.378698] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2228.378827] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2228.378906] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2228.379067] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2228.379293] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2228.379452] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2228.379617] env[61570]: DEBUG 
nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2228.379775] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2228.380097] env[61570]: DEBUG nova.virt.hardware [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2228.380823] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fd1278-7604-4fb5-b8f1-6b84d1da1a2c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.389048] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d732256-0821-4430-8f70-752f9b2f3693 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.707269] env[61570]: DEBUG nova.network.neutron [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Successfully created port: 130d2967-0fd7-4bd0-8bf4-761e63ae7182 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2229.415075] env[61570]: DEBUG nova.network.neutron [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Successfully updated port: 130d2967-0fd7-4bd0-8bf4-761e63ae7182 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2229.427685] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquiring lock "refresh_cache-9a6bb63e-ea38-476e-8597-aba1d55ed5f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2229.427836] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquired lock "refresh_cache-9a6bb63e-ea38-476e-8597-aba1d55ed5f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2229.427986] env[61570]: DEBUG nova.network.neutron [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2229.478175] env[61570]: DEBUG nova.network.neutron [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 
tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2229.653042] env[61570]: DEBUG nova.network.neutron [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Updating instance_info_cache with network_info: [{"id": "130d2967-0fd7-4bd0-8bf4-761e63ae7182", "address": "fa:16:3e:f0:aa:d8", "network": {"id": "91d913f4-990c-463a-8b98-edf28e9490cd", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1523747671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9879c18ea258468bad18ac62d0610b69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap130d2967-0f", "ovs_interfaceid": "130d2967-0fd7-4bd0-8bf4-761e63ae7182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2229.665770] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Releasing lock "refresh_cache-9a6bb63e-ea38-476e-8597-aba1d55ed5f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2229.666138] env[61570]: DEBUG nova.compute.manager [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Instance network_info: |[{"id": "130d2967-0fd7-4bd0-8bf4-761e63ae7182", "address": "fa:16:3e:f0:aa:d8", "network": {"id": "91d913f4-990c-463a-8b98-edf28e9490cd", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1523747671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9879c18ea258468bad18ac62d0610b69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap130d2967-0f", "ovs_interfaceid": "130d2967-0fd7-4bd0-8bf4-761e63ae7182", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2229.666577] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:aa:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '130d2967-0fd7-4bd0-8bf4-761e63ae7182', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2229.674020] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Creating folder: Project (9879c18ea258468bad18ac62d0610b69). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2229.674589] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4d5cdf1-55f7-4457-8155-5681b095b8a5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.685458] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Created folder: Project (9879c18ea258468bad18ac62d0610b69) in parent group-v953072. [ 2229.685654] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Creating folder: Instances. Parent ref: group-v953192. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2229.685933] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c79524d-c559-4469-b552-f2c6b7518d65 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.694033] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Created folder: Instances in parent group-v953192. [ 2229.694226] env[61570]: DEBUG oslo.service.loopingcall [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2229.694407] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2229.694613] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-707eabe6-017c-473f-8a8d-806961c436c1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.715215] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2229.715215] env[61570]: value = "task-4891479" [ 2229.715215] env[61570]: _type = "Task" [ 2229.715215] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.723485] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891479, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.828607] env[61570]: DEBUG nova.compute.manager [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Received event network-vif-plugged-130d2967-0fd7-4bd0-8bf4-761e63ae7182 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2229.828856] env[61570]: DEBUG oslo_concurrency.lockutils [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] Acquiring lock "9a6bb63e-ea38-476e-8597-aba1d55ed5f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.829122] env[61570]: DEBUG oslo_concurrency.lockutils [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] Lock "9a6bb63e-ea38-476e-8597-aba1d55ed5f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.829319] env[61570]: DEBUG oslo_concurrency.lockutils [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] Lock "9a6bb63e-ea38-476e-8597-aba1d55ed5f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.829533] env[61570]: DEBUG nova.compute.manager [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] No waiting events found dispatching network-vif-plugged-130d2967-0fd7-4bd0-8bf4-761e63ae7182 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2229.829768] env[61570]: WARNING nova.compute.manager [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Received unexpected event network-vif-plugged-130d2967-0fd7-4bd0-8bf4-761e63ae7182 for instance with vm_state building and task_state spawning. 
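The CreateVM_Task above (task-4891479) is submitted and then polled through oslo.vmware, which is what produces the "Waiting for the task ... progress is 0%" lines. A minimal sketch of that call pattern, assuming placeholder vCenter credentials and already-retrieved managed object references (the session parameters, vm_folder, resource_pool and config_spec below are hypothetical stand-ins, not values from this deployment):

    from oslo_vmware import api

    # Connects to vCenter on construction; host and credentials are placeholders.
    session = api.VMwareAPISession(
        "vcenter.example.org", "user", "secret",
        api_retry_count=10, task_poll_interval=0.5)

    def create_vm(session, vm_folder, resource_pool, config_spec):
        # Submit the vCenter task, then block while oslo.vmware polls it,
        # logging progress until it completes or raises on a fault.
        task_ref = session.invoke_api(session.vim, "CreateVM_Task", vm_folder,
                                      config=config_spec, pool=resource_pool)
        task_info = session.wait_for_task(task_ref)
        return task_info.result  # managed object reference of the new VM

On success the poller logs "completed successfully" with the task duration, as this CreateVM_Task does a little further down; on a vCenter fault wait_for_task raises instead, which is the failure mode visible at the end of this trace for the CopyVirtualDisk_Task.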
[ 2229.829962] env[61570]: DEBUG nova.compute.manager [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Received event network-changed-130d2967-0fd7-4bd0-8bf4-761e63ae7182 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2229.830146] env[61570]: DEBUG nova.compute.manager [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Refreshing instance network info cache due to event network-changed-130d2967-0fd7-4bd0-8bf4-761e63ae7182. {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2229.830365] env[61570]: DEBUG oslo_concurrency.lockutils [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] Acquiring lock "refresh_cache-9a6bb63e-ea38-476e-8597-aba1d55ed5f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2229.830506] env[61570]: DEBUG oslo_concurrency.lockutils [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] Acquired lock "refresh_cache-9a6bb63e-ea38-476e-8597-aba1d55ed5f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2229.830667] env[61570]: DEBUG nova.network.neutron [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Refreshing network info cache for port 130d2967-0fd7-4bd0-8bf4-761e63ae7182 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2230.151664] env[61570]: DEBUG nova.network.neutron [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Updated VIF entry in instance network info cache for port 130d2967-0fd7-4bd0-8bf4-761e63ae7182. 
{{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2230.151664] env[61570]: DEBUG nova.network.neutron [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Updating instance_info_cache with network_info: [{"id": "130d2967-0fd7-4bd0-8bf4-761e63ae7182", "address": "fa:16:3e:f0:aa:d8", "network": {"id": "91d913f4-990c-463a-8b98-edf28e9490cd", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1523747671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9879c18ea258468bad18ac62d0610b69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap130d2967-0f", "ovs_interfaceid": "130d2967-0fd7-4bd0-8bf4-761e63ae7182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2230.161312] env[61570]: DEBUG oslo_concurrency.lockutils [req-9b3d1d74-6e58-45c8-94e5-b942a25f19bc req-d2a1d7b3-8f75-4137-8713-e4679c1c6f0a service nova] Releasing lock "refresh_cache-9a6bb63e-ea38-476e-8597-aba1d55ed5f5" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2230.225980] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891479, 'name': CreateVM_Task, 'duration_secs': 0.293155} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.226173] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2230.226863] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2230.227083] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2230.227406] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2230.227937] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea6f2cc2-401c-4e04-b031-bacedad381b9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.232515] env[61570]: DEBUG oslo_vmware.api [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Waiting for the task: (returnval){ [ 2230.232515] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c9d38a-55ac-aabd-4908-e892893b8989" [ 2230.232515] env[61570]: _type = "Task" [ 2230.232515] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.240193] env[61570]: DEBUG oslo_vmware.api [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52c9d38a-55ac-aabd-4908-e892893b8989, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.743935] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2230.744283] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2230.744509] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ecaa22cb-762f-48f4-9eb4-0524c2780823 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2244.079393] env[61570]: DEBUG oslo_concurrency.lockutils [None req-7f18ed69-41cd-4a1e-a0f8-1187c9e60ed4 tempest-ServerShowV247Test-501578296 tempest-ServerShowV247Test-501578296-project-member] Acquiring lock "ece33fed-2e33-4876-83b5-7618968faa12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.753240] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.753652] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2258.753652] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.766031] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.766289] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.766466] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.766620] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2258.767721] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2301ee04-8524-4377-8e1e-6fa20932ebc8 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.776638] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8d75c3-6cc6-4438-94b1-8819f5fa5c5a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.790515] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634f2a40-8915-40ec-9511-7e8fac744cc2 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.796884] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8591a369-b41c-4388-bdf3-cb7629c26725 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.825063] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180565MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2258.825249] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2258.825397] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.903215] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d7a13cff-f371-46d2-baea-b01a3731724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.903452] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.903541] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.903667] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.903804] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.903924] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.904056] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2fde6764-2bfe-4634-b371-91bc1a5e38e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.904221] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 345f407a-879d-4c87-810a-fbad1b1d4c07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.904347] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ece33fed-2e33-4876-83b5-7618968faa12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.904463] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9a6bb63e-ea38-476e-8597-aba1d55ed5f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2258.918870] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 4c56b397-97b5-4210-9130-0a8769f8f19c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2258.919107] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2258.919277] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '87', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '8', 'num_os_type_None': '10', 'num_proj_c0374f79fe6d4946a64c2acc369178cf': '1', 'io_workload': '10', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_e11c8be84d5d443f9d287ef7c345fe9a': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_task_spawning': '2', 'num_proj_d1c11dd2b9174b0a91c5f74e9410913b': '2', 'num_proj_9879c18ea258468bad18ac62d0610b69': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2259.066082] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bc2904-8693-4c78-8145-5a5bcaea32f9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.074244] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d1c9f9-74a2-42ce-8372-1756e501d865 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.104424] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ae7ea3-3cf0-4f40-a86f-8fcbe22ba501 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.112690] 
env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244b7d4f-cb17-4fbe-b9bf-08762348e6ba {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.126779] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2259.135127] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2259.150614] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2259.150808] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.325s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.151429] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.753192] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.753442] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2261.754213] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2262.753585] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2262.753800] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:10007}} [ 2262.753892] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2262.776371] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.776656] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.776690] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.776854] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.776945] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.777135] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.777316] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.777437] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.777562] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.777686] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2262.777814] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2265.753534] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2270.750315] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.061729] env[61570]: WARNING oslo_vmware.rw_handles [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2275.061729] env[61570]: ERROR oslo_vmware.rw_handles [ 2275.062538] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2275.063942] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2275.064356] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Copying 
Virtual Disk [datastore2] vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/4f19b242-f2bd-45ee-aa7a-e0e86f6d275f/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2275.064642] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-886732c0-cf42-4e3d-b4f3-fcb3487f1079 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.072184] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 2275.072184] env[61570]: value = "task-4891480" [ 2275.072184] env[61570]: _type = "Task" [ 2275.072184] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.082728] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': task-4891480, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.582816] env[61570]: DEBUG oslo_vmware.exceptions [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2275.583152] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2275.583753] env[61570]: ERROR nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2275.583753] env[61570]: Faults: ['InvalidArgument'] [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Traceback (most recent call last): [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] yield resources [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self.driver.spawn(context, instance, image_meta, [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self._fetch_image_if_missing(context, vi) [ 2275.583753] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] image_cache(vi, tmp_image_ds_loc) [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] vm_util.copy_virtual_disk( [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] session._wait_for_task(vmdk_copy_task) [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] return self.wait_for_task(task_ref) [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] return evt.wait() [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] result = hub.switch() [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2275.584121] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] return self.greenlet.switch() [ 2275.584644] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2275.584644] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self.f(*self.args, **self.kw) [ 2275.584644] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2275.584644] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] raise exceptions.translate_fault(task_info.error) [ 
2275.584644] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2275.584644] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Faults: ['InvalidArgument'] [ 2275.584644] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] [ 2275.584644] env[61570]: INFO nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Terminating instance [ 2275.586334] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2275.586601] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2275.587361] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2275.587546] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2275.587809] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dacd8a3-cf79-4503-96e9-ff97f5d61001 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.590355] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667f8d06-d2ba-4121-824e-e1d04c87fd0d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.597640] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2275.597838] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1a4d644-8871-4436-960b-35329b4970f7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.600093] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 
tempest-MultipleCreateTestJSON-840349459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2275.600270] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2275.601300] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50f87653-0a32-4078-a7ac-5ec9c271e6c9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.606510] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 2275.606510] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5225e387-a97c-3a1d-fe52-871727821d47" [ 2275.606510] env[61570]: _type = "Task" [ 2275.606510] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.615140] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5225e387-a97c-3a1d-fe52-871727821d47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.676585] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2275.676844] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2275.677052] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Deleting the datastore file [datastore2] d7a13cff-f371-46d2-baea-b01a3731724a {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2275.677331] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc28d19c-d7a2-43cc-b13a-1c3202b92df1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.684398] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for the task: (returnval){ [ 2275.684398] env[61570]: value = "task-4891482" [ 2275.684398] env[61570]: _type = "Task" [ 2275.684398] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.692580] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': task-4891482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.117998] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2276.118322] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating directory with path [datastore2] vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2276.118552] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a149a2e5-95a4-42cb-b658-b1cf8ea6c4ec {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.131235] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Created directory with path [datastore2] vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2276.131451] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Fetch image to [datastore2] vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2276.131665] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2276.132471] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145a78d2-a8c6-4c10-a503-33b859d71186 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.141310] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f02ae4-5bda-4509-afcb-bfd78ebcf91e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.152618] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d198ca-1e86-43a8-8f7e-4fa0a22c4f07 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.185356] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c287ace3-9c1f-4174-8720-65e3f05f0157 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.197955] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c628e9a0-f458-4350-816d-85052abe8bfa {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.199836] env[61570]: DEBUG oslo_vmware.api [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Task: {'id': task-4891482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068014} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.200109] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2276.200297] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2276.200470] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2276.200643] env[61570]: INFO nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2276.203075] env[61570]: DEBUG nova.compute.claims [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2276.203249] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2276.203461] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2276.223264] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2276.332274] env[61570]: DEBUG oslo_vmware.rw_handles [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2276.390404] env[61570]: DEBUG oslo_vmware.rw_handles [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2276.390404] env[61570]: DEBUG oslo_vmware.rw_handles [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2276.459505] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0afe113-f58c-48e0-988d-ead145459b83 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.467882] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ac8885-9578-4866-bfc0-ffddd27a1a98 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.499198] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662d8aa9-59a3-4485-b2a4-c51cb480c9d9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.507409] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b1e715-1e0d-4fd3-ac52-37ead23aaaa5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.521756] env[61570]: DEBUG nova.compute.provider_tree [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2276.530552] env[61570]: DEBUG nova.scheduler.client.report [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2276.545141] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.341s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2276.545733] env[61570]: ERROR nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2276.545733] env[61570]: Faults: ['InvalidArgument'] [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Traceback (most recent call last): [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2276.545733] env[61570]: ERROR 
nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self.driver.spawn(context, instance, image_meta, [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self._fetch_image_if_missing(context, vi) [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] image_cache(vi, tmp_image_ds_loc) [ 2276.545733] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] vm_util.copy_virtual_disk( [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] session._wait_for_task(vmdk_copy_task) [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] return self.wait_for_task(task_ref) [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] return evt.wait() [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] result = hub.switch() [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] return self.greenlet.switch() [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2276.546142] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] self.f(*self.args, **self.kw) [ 2276.546555] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2276.546555] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] raise exceptions.translate_fault(task_info.error) [ 2276.546555] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2276.546555] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Faults: ['InvalidArgument'] [ 2276.546555] env[61570]: ERROR nova.compute.manager [instance: d7a13cff-f371-46d2-baea-b01a3731724a] [ 2276.546555] env[61570]: DEBUG nova.compute.utils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2276.548567] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Build of instance d7a13cff-f371-46d2-baea-b01a3731724a was re-scheduled: A specified parameter was not correct: fileType [ 2276.548567] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2276.549034] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2276.549197] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2276.549373] env[61570]: DEBUG nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2276.549543] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2276.945115] env[61570]: DEBUG nova.network.neutron [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2276.959890] env[61570]: INFO nova.compute.manager [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Took 0.41 seconds to deallocate network for instance. [ 2277.123854] env[61570]: INFO nova.scheduler.client.report [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Deleted allocations for instance d7a13cff-f371-46d2-baea-b01a3731724a [ 2277.150614] env[61570]: DEBUG oslo_concurrency.lockutils [None req-67f15a4a-f905-4918-9bd4-59e91ae34204 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "d7a13cff-f371-46d2-baea-b01a3731724a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 690.167s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.151824] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "d7a13cff-f371-46d2-baea-b01a3731724a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 494.428s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.152053] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "d7a13cff-f371-46d2-baea-b01a3731724a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2277.152268] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "d7a13cff-f371-46d2-baea-b01a3731724a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.152435] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "d7a13cff-f371-46d2-baea-b01a3731724a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.155025] env[61570]: INFO nova.compute.manager [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Terminating instance [ 2277.157513] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquiring lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2277.157513] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Acquired lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2277.157684] env[61570]: DEBUG nova.network.neutron [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2277.170316] env[61570]: DEBUG nova.compute.manager [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2277.217988] env[61570]: DEBUG nova.network.neutron [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2277.243984] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2277.244412] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.247090] env[61570]: INFO nova.compute.claims [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2277.467386] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2e763d-dc2b-49f9-b371-0376d7b19219 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.476215] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c0c835-6b8f-4456-ba85-6ff9f5189b80 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.508553] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae882d2-149c-4e19-a5f6-055b213b9757 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.517590] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9277e5de-ecc2-4ded-98ee-5aa61cafaa06 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.542146] env[61570]: DEBUG nova.compute.provider_tree [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2277.555972] env[61570]: DEBUG nova.scheduler.client.report [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2277.577233] env[61570]: DEBUG oslo_concurrency.lockutils [None 
req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.333s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.578712] env[61570]: DEBUG nova.compute.manager [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2277.619646] env[61570]: DEBUG nova.compute.utils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2277.624029] env[61570]: DEBUG nova.compute.manager [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Not allocating networking since 'none' was specified. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 2277.649521] env[61570]: DEBUG nova.compute.manager [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2277.699264] env[61570]: DEBUG nova.network.neutron [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2277.708642] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Releasing lock "refresh_cache-d7a13cff-f371-46d2-baea-b01a3731724a" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2277.709232] env[61570]: DEBUG nova.compute.manager [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2277.709460] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2277.710106] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c77f677-1937-4082-8645-e8b8415eb8a4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.723982] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1d57f0-d146-4bf5-8dc1-94103061e5ea {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.737282] env[61570]: DEBUG nova.compute.manager [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2277.767285] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7a13cff-f371-46d2-baea-b01a3731724a could not be found. [ 2277.767551] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2277.768120] env[61570]: INFO nova.compute.manager [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2277.768120] env[61570]: DEBUG oslo.service.loopingcall [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2277.774248] env[61570]: DEBUG nova.compute.manager [-] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2277.774248] env[61570]: DEBUG nova.network.neutron [-] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2277.782705] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2277.783200] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2277.783562] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2277.783950] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2277.784281] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2277.784616] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2277.785060] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2277.785416] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2277.785764] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2277.786142] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2277.786561] env[61570]: DEBUG nova.virt.hardware [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2277.788209] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218593f0-d73e-449d-91dc-67153829450f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.802017] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97af2e25-968c-45a5-a2b5-24089873440c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.818068] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Instance VIF info [] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2277.824528] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Creating folder: Project (ad9eb62e68d84940a9f2ee2acbe60221). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2277.825644] env[61570]: DEBUG nova.network.neutron [-] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2277.827574] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0cf4ed93-dd85-4273-80fb-0f783e86935c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.836764] env[61570]: DEBUG nova.network.neutron [-] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2277.841021] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Created folder: Project (ad9eb62e68d84940a9f2ee2acbe60221) in parent group-v953072. [ 2277.841021] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Creating folder: Instances. Parent ref: group-v953195. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2277.841021] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c30b8b7-1e12-4d59-8b39-2c3e7cb749ba {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.849797] env[61570]: INFO nova.compute.manager [-] [instance: d7a13cff-f371-46d2-baea-b01a3731724a] Took 0.08 seconds to deallocate network for instance. [ 2277.853116] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Created folder: Instances in parent group-v953195. [ 2277.853450] env[61570]: DEBUG oslo.service.loopingcall [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2277.856715] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2277.857430] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a015dbc3-8711-4896-bc65-18c3f55eace4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.880615] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2277.880615] env[61570]: value = "task-4891485" [ 2277.880615] env[61570]: _type = "Task" [ 2277.880615] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2277.890410] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891485, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.986914] env[61570]: DEBUG oslo_concurrency.lockutils [None req-6ee2cca7-e0e0-4c16-a402-a6d305f665b6 tempest-AttachVolumeTestJSON-1222939441 tempest-AttachVolumeTestJSON-1222939441-project-member] Lock "d7a13cff-f371-46d2-baea-b01a3731724a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.835s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2278.395519] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891485, 'name': CreateVM_Task, 'duration_secs': 0.294901} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2278.395940] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2278.396755] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2278.397038] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2278.397633] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2278.398219] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e173c2f9-4761-44bd-970a-7e955a4f0aae {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.404860] env[61570]: DEBUG oslo_vmware.api [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Waiting for the task: (returnval){ [ 2278.404860] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]528c076d-2574-3e59-c170-575493d972a8" [ 2278.404860] env[61570]: _type = "Task" [ 2278.404860] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.421937] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2278.421937] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2278.422088] env[61570]: DEBUG oslo_concurrency.lockutils [None req-f8473761-27a2-4a13-baae-0c8ac57784b4 tempest-ServersListShow296Test-440227283 tempest-ServersListShow296Test-440227283-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2282.773743] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquiring lock "977a0d78-1e10-4a67-8c90-54c385485622" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.778335] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Lock "977a0d78-1e10-4a67-8c90-54c385485622" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.752763] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2318.753180] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2319.753460] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.753839] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.765968] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.766197] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.766368] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2319.766523] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2319.768049] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3cfd52-902e-493f-b4d5-56a5fabe014f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.776596] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a6f8df-197c-4ae5-888a-d5d6c5cecdbd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.790900] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d134b0-7103-40be-948f-328a787d02fe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.797512] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8021efb9-3a95-43da-a446-fd1e438c07fb {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.827513] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180580MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2319.827704] env[61570]: DEBUG 
oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.827908] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.978165] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.978345] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 15cc451d-9419-4952-83a4-4fde3d237f8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.978476] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.978599] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.978732] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.978908] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2fde6764-2bfe-4634-b371-91bc1a5e38e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.979048] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 345f407a-879d-4c87-810a-fbad1b1d4c07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.979171] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ece33fed-2e33-4876-83b5-7618968faa12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.979287] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9a6bb63e-ea38-476e-8597-aba1d55ed5f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.979398] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 4c56b397-97b5-4210-9130-0a8769f8f19c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2319.994992] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 977a0d78-1e10-4a67-8c90-54c385485622 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1767}} [ 2319.995245] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2319.995408] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '88', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '7', 'num_os_type_None': '10', 'num_proj_1e239f92d55742a7bf8a5bbc33ca718b': '1', 'io_workload': '10', 'num_proj_fb1862517c1f4d239b931bb18211f2b8': '1', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_e11c8be84d5d443f9d287ef7c345fe9a': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_task_spawning': '3', 'num_proj_d1c11dd2b9174b0a91c5f74e9410913b': '2', 'num_proj_9879c18ea258468bad18ac62d0610b69': '1', 'num_proj_ad9eb62e68d84940a9f2ee2acbe60221': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2320.136051] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606f455f-3d8d-4876-8173-e807056b8783 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.144031] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-49434467-4272-443d-a52a-da0461902fdd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.174689] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92056f7-d961-4f2a-a7eb-5141c9d2d543 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.182331] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd27713-791d-4365-8e41-493b59c12616 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.195739] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2320.204423] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2320.218341] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2320.218527] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.391s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.218747] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.218889] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11307}} [ 2320.226758] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] There are 0 instances to clean {{(pid=61570) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11316}} [ 2321.196921] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2321.197475] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Getting list of instances from cluster (obj){ [ 
2321.197475] env[61570]: value = "domain-c8" [ 2321.197475] env[61570]: _type = "ClusterComputeResource" [ 2321.197475] env[61570]: } {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2321.198524] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ed29e0-616c-4b34-8856-055b560558c4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.215348] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Got total of 10 instances {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2321.790204] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2321.790204] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2322.753832] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.351734] env[61570]: WARNING oslo_vmware.rw_handles [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2324.351734] env[61570]: ERROR oslo_vmware.rw_handles [ 2324.352822] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data 
store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2324.354158] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2324.354428] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Copying Virtual Disk [datastore2] vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/e96ad0c3-ca8b-4db5-82e5-c5bb8576bdb0/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2324.354732] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b314f5cb-ac9f-40b3-a3f0-0f3ed0c20067 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.362662] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 2324.362662] env[61570]: value = "task-4891486" [ 2324.362662] env[61570]: _type = "Task" [ 2324.362662] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.371073] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': task-4891486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.749194] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.773444] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2324.773621] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2324.773715] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2324.792511] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.792669] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.792805] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.792933] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.793066] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.793191] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.793309] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.793425] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.793544] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.793662] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2324.793785] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2324.873200] env[61570]: DEBUG oslo_vmware.exceptions [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Fault InvalidArgument not matched. 
{{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2324.873496] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2324.874062] env[61570]: ERROR nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2324.874062] env[61570]: Faults: ['InvalidArgument'] [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Traceback (most recent call last): [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] yield resources [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] self.driver.spawn(context, instance, image_meta, [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] self._fetch_image_if_missing(context, vi) [ 2324.874062] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] image_cache(vi, tmp_image_ds_loc) [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] vm_util.copy_virtual_disk( [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] session._wait_for_task(vmdk_copy_task) [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] return self.wait_for_task(task_ref) [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] return evt.wait() [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] result = hub.switch() [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2324.874420] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] return self.greenlet.switch() [ 2324.874861] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2324.874861] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] self.f(*self.args, **self.kw) [ 2324.874861] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2324.874861] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] raise exceptions.translate_fault(task_info.error) [ 2324.874861] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2324.874861] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Faults: ['InvalidArgument'] [ 2324.874861] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] [ 2324.874861] env[61570]: INFO nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Terminating instance [ 2324.875969] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2324.876194] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2324.876438] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-277ff629-7112-4f0a-bad3-e83426498db7 
{{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.878735] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2324.878930] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2324.879667] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3090dd-5942-4076-9a5f-024d473cf939 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.886287] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2324.886504] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b154fa17-2bfe-4673-96ab-de49f6a39a7d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.888929] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2324.889122] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2324.890058] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8e8453c-e1bc-41e5-b118-6c8e725f972b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.894676] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Waiting for the task: (returnval){ [ 2324.894676] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]528b120a-aec7-5ef9-4609-b620d6f8ad67" [ 2324.894676] env[61570]: _type = "Task" [ 2324.894676] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.903882] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]528b120a-aec7-5ef9-4609-b620d6f8ad67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.110305] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2325.110552] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2325.110825] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Deleting the datastore file [datastore2] 788bc317-0136-42c9-b8f6-7d1a68df3109 {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2325.111137] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fa06a6b-b2ff-4c1e-9f02-b3f5f8f3af5b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.118074] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for the task: (returnval){ [ 2325.118074] env[61570]: value = "task-4891488" [ 2325.118074] env[61570]: _type = "Task" [ 2325.118074] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.125744] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': task-4891488, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.405682] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2325.406064] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Creating directory with path [datastore2] vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2325.406107] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fceffc15-93ed-4c9a-b5c2-9f95bb116f22 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.417582] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Created directory with path [datastore2] vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2325.417834] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Fetch image to [datastore2] vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2325.418081] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2325.418813] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55129a68-f20e-4548-92eb-ba19c2446b2b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.426225] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff6932c-700a-4357-bb27-976b427fb9a4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.436136] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088e5c96-cab8-4a4d-9ffa-e0864e460678 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.470241] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8287bc0f-59d5-4379-980b-efb8e09e48e9 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.477104] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8d2fcf59-27e5-4ec1-af75-0062f8a91406 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.502144] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2325.554518] env[61570]: DEBUG oslo_vmware.rw_handles [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2325.613290] env[61570]: DEBUG oslo_vmware.rw_handles [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2325.613559] env[61570]: DEBUG oslo_vmware.rw_handles [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2325.627696] env[61570]: DEBUG oslo_vmware.api [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Task: {'id': task-4891488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071601} completed successfully. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.627936] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2325.628145] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2325.628325] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2325.628497] env[61570]: INFO nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Took 0.75 seconds to destroy the instance on the hypervisor. [ 2325.630752] env[61570]: DEBUG nova.compute.claims [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2325.630921] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2325.631146] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2325.753190] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.833451] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735f34e6-0455-4030-b8e5-4d8215ac6abd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.841202] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8244fd-114b-49fd-b61e-1eb41f176efe {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.873507] env[61570]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb7fb7e-839b-4c8e-bc29-28ea6bb012e0 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.881711] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac290f95-ee09-417a-afe5-8858b1f20fc7 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.895276] env[61570]: DEBUG nova.compute.provider_tree [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2325.905346] env[61570]: DEBUG nova.scheduler.client.report [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2325.921288] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.290s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2325.921766] env[61570]: ERROR nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2325.921766] env[61570]: Faults: ['InvalidArgument'] [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Traceback (most recent call last): [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] self.driver.spawn(context, instance, image_meta, [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 
788bc317-0136-42c9-b8f6-7d1a68df3109] self._fetch_image_if_missing(context, vi) [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] image_cache(vi, tmp_image_ds_loc) [ 2325.921766] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] vm_util.copy_virtual_disk( [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] session._wait_for_task(vmdk_copy_task) [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] return self.wait_for_task(task_ref) [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] return evt.wait() [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] result = hub.switch() [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] return self.greenlet.switch() [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2325.922311] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] self.f(*self.args, **self.kw) [ 2325.922854] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2325.922854] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] raise exceptions.translate_fault(task_info.error) [ 2325.922854] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2325.922854] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Faults: ['InvalidArgument'] [ 2325.922854] env[61570]: ERROR nova.compute.manager [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] [ 2325.922854] env[61570]: DEBUG nova.compute.utils 
[None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2325.923962] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Build of instance 788bc317-0136-42c9-b8f6-7d1a68df3109 was re-scheduled: A specified parameter was not correct: fileType [ 2325.923962] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2325.924447] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2325.924634] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2325.924813] env[61570]: DEBUG nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2325.924977] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2326.281642] env[61570]: DEBUG nova.network.neutron [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2326.294673] env[61570]: INFO nova.compute.manager [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Took 0.37 seconds to deallocate network for instance. 
[ 2326.397051] env[61570]: INFO nova.scheduler.client.report [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Deleted allocations for instance 788bc317-0136-42c9-b8f6-7d1a68df3109 [ 2326.418442] env[61570]: DEBUG oslo_concurrency.lockutils [None req-ad9d0b5b-4d5c-41bb-beb7-12ec4bea5417 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 673.473s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2326.419675] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 477.362s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2326.419931] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Acquiring lock "788bc317-0136-42c9-b8f6-7d1a68df3109-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2326.420194] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2326.420374] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2326.422598] env[61570]: INFO nova.compute.manager [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Terminating instance [ 2326.424450] env[61570]: DEBUG nova.compute.manager [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Start destroying the instance on the hypervisor. 
{{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2326.424648] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2326.425154] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e287e4cb-bcc2-4278-a9fe-550de3ecc41a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.436803] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cbc9eb-b19f-4c56-84fc-52bd21cd3cb4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.447261] env[61570]: DEBUG nova.compute.manager [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2326.477168] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 788bc317-0136-42c9-b8f6-7d1a68df3109 could not be found. [ 2326.477417] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2326.477602] env[61570]: INFO nova.compute.manager [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2326.477892] env[61570]: DEBUG oslo.service.loopingcall [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2326.478203] env[61570]: DEBUG nova.compute.manager [-] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2326.478303] env[61570]: DEBUG nova.network.neutron [-] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2326.497905] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2326.498206] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2326.499688] env[61570]: INFO nova.compute.claims [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2326.503072] env[61570]: DEBUG nova.network.neutron [-] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2326.513469] env[61570]: INFO nova.compute.manager [-] [instance: 788bc317-0136-42c9-b8f6-7d1a68df3109] Took 0.04 seconds to deallocate network for instance. 
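The terminate path above illustrates that destroy is effectively idempotent: the backend lookup for the UUID finds nothing, the driver logs "Instance does not exist on backend", treats the instance as already destroyed, and the manager still proceeds to deallocate the network. A rough sketch of that behaviour follows; the classes are illustrative only, not Nova's vmops code.

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


class FakeBackend:
    """Pretend vCenter session: knows about no VMs at all."""

    def find_vm_by_uuid(self, uuid):
        raise InstanceNotFound(f"Instance {uuid} could not be found.")

    def unregister_and_delete(self, vm_ref):
        pass  # would unregister the VM and delete its datastore files


def destroy(backend, uuid):
    try:
        vm_ref = backend.find_vm_by_uuid(uuid)
    except InstanceNotFound as exc:
        # Mirrors the WARNING in the log: nothing to clean up on the hypervisor side,
        # so destroy still reports success and network deallocation can follow.
        LOG.warning("Instance does not exist on backend: %s", exc)
        return
    backend.unregister_and_delete(vm_ref)


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    destroy(FakeBackend(), "788bc317-0136-42c9-b8f6-7d1a68df3109")
    print("instance destroyed; network deallocation would follow")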
[ 2326.610135] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c4d3e473-1302-435c-ac76-bd124c4255e0 tempest-MultipleCreateTestJSON-840349459 tempest-MultipleCreateTestJSON-840349459-project-member] Lock "788bc317-0136-42c9-b8f6-7d1a68df3109" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2326.683089] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2980a7b-8f60-4f6e-ab1b-3a317c1a6252 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.691458] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f36ead-ad12-40bf-8fc6-a2e6ea6c10ef {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.721898] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697929e3-ea97-4048-9e56-c49dcf951aca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.729888] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ad3392-4d83-497f-a2c7-12c0126cb8a3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.744371] env[61570]: DEBUG nova.compute.provider_tree [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2326.753503] env[61570]: DEBUG nova.scheduler.client.report [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2326.767016] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.269s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2326.767375] env[61570]: DEBUG nova.compute.manager [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Start building networks asynchronously for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2326.825207] env[61570]: DEBUG nova.compute.utils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2326.826567] env[61570]: DEBUG nova.compute.manager [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2326.826741] env[61570]: DEBUG nova.network.neutron [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2326.836871] env[61570]: DEBUG nova.compute.manager [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Start building block device mappings for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2326.894804] env[61570]: DEBUG nova.policy [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaa6d3f3fb5e472a92a5bc425224aee9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0af458d2a48407f8609850f08fc65c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 2326.913430] env[61570]: DEBUG nova.compute.manager [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Start spawning the instance on the hypervisor. 
{{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2326.944357] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2326.944357] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2326.944357] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2326.944523] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2326.944523] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2326.944523] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2326.944523] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2326.944523] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2326.944698] 
env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2326.944698] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2326.944698] env[61570]: DEBUG nova.virt.hardware [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2326.944960] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8e9af4-14f3-4f9b-b561-ce25a3796005 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.953913] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a13c5f9-a6b3-404d-9dce-fdd13e8a778e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.449481] env[61570]: DEBUG nova.network.neutron [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Successfully created port: bb1c5e19-53aa-483d-9a22-00c85a69b0e7 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2328.118503] env[61570]: DEBUG nova.network.neutron [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Successfully updated port: bb1c5e19-53aa-483d-9a22-00c85a69b0e7 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2328.126668] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquiring lock "refresh_cache-977a0d78-1e10-4a67-8c90-54c385485622" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2328.126829] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquired lock "refresh_cache-977a0d78-1e10-4a67-8c90-54c385485622" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2328.126996] env[61570]: DEBUG nova.network.neutron [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2328.199833] env[61570]: DEBUG 
nova.network.neutron [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Instance cache missing network info. {{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2328.330257] env[61570]: DEBUG nova.compute.manager [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Received event network-vif-plugged-bb1c5e19-53aa-483d-9a22-00c85a69b0e7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2328.330546] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] Acquiring lock "977a0d78-1e10-4a67-8c90-54c385485622-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.330762] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] Lock "977a0d78-1e10-4a67-8c90-54c385485622-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.330933] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] Lock "977a0d78-1e10-4a67-8c90-54c385485622-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2328.331131] env[61570]: DEBUG nova.compute.manager [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] No waiting events found dispatching network-vif-plugged-bb1c5e19-53aa-483d-9a22-00c85a69b0e7 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2328.331301] env[61570]: WARNING nova.compute.manager [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Received unexpected event network-vif-plugged-bb1c5e19-53aa-483d-9a22-00c85a69b0e7 for instance with vm_state building and task_state spawning. [ 2328.331557] env[61570]: DEBUG nova.compute.manager [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Received event network-changed-bb1c5e19-53aa-483d-9a22-00c85a69b0e7 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2328.331610] env[61570]: DEBUG nova.compute.manager [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Refreshing instance network info cache due to event network-changed-bb1c5e19-53aa-483d-9a22-00c85a69b0e7. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2328.331783] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] Acquiring lock "refresh_cache-977a0d78-1e10-4a67-8c90-54c385485622" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2328.400966] env[61570]: DEBUG nova.network.neutron [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Updating instance_info_cache with network_info: [{"id": "bb1c5e19-53aa-483d-9a22-00c85a69b0e7", "address": "fa:16:3e:02:20:42", "network": {"id": "a9d175f4-2177-4efc-a7af-2fc159ec11f7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-222280056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0af458d2a48407f8609850f08fc65c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb1c5e19-53", "ovs_interfaceid": "bb1c5e19-53aa-483d-9a22-00c85a69b0e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2328.415380] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Releasing lock "refresh_cache-977a0d78-1e10-4a67-8c90-54c385485622" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2328.415661] env[61570]: DEBUG nova.compute.manager [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Instance network_info: |[{"id": "bb1c5e19-53aa-483d-9a22-00c85a69b0e7", "address": "fa:16:3e:02:20:42", "network": {"id": "a9d175f4-2177-4efc-a7af-2fc159ec11f7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-222280056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0af458d2a48407f8609850f08fc65c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 
61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb1c5e19-53", "ovs_interfaceid": "bb1c5e19-53aa-483d-9a22-00c85a69b0e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2328.415979] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] Acquired lock "refresh_cache-977a0d78-1e10-4a67-8c90-54c385485622" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2328.416182] env[61570]: DEBUG nova.network.neutron [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Refreshing network info cache for port bb1c5e19-53aa-483d-9a22-00c85a69b0e7 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2328.417332] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:20:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6fab536-1e48-4d07-992a-076f0e6d089c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb1c5e19-53aa-483d-9a22-00c85a69b0e7', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2328.425274] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Creating folder: Project (e0af458d2a48407f8609850f08fc65c1). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2328.428617] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58a344ae-2519-46fc-b6a4-67f92401872b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.441480] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Created folder: Project (e0af458d2a48407f8609850f08fc65c1) in parent group-v953072. [ 2328.441751] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Creating folder: Instances. Parent ref: group-v953198. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2328.442014] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77d1204f-1a36-450b-b70f-edbceed86489 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.452175] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Created folder: Instances in parent group-v953198. 
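Before the VM is created, the log shows a two-step folder layout being prepared in vCenter: a per-project folder named "Project (<project_id>)" is created under the tenant parent ref, then an "Instances" folder inside it, and CreateVM_Task runs against that folder. The sketch below captures that layout with an in-memory, idempotent folder tree; the Folder class is a stand-in for the real Folder.CreateFolder calls visible above.

from dataclasses import dataclass, field


@dataclass
class Folder:
    name: str
    children: dict = field(default_factory=dict)

    def create_child(self, name: str) -> "Folder":
        # Idempotent: reuse the folder if it already exists (the real code treats
        # vCenter's DuplicateName fault the same way).
        return self.children.setdefault(name, Folder(name))


def folder_for_instance(root: Folder, project_id: str) -> Folder:
    project = root.create_child(f"Project ({project_id})")
    return project.create_child("Instances")


if __name__ == "__main__":
    root = Folder("group-v953072")
    target = folder_for_instance(root, "e0af458d2a48407f8609850f08fc65c1")
    print(target.name)  # "Instances" -- CreateVM_Task would then run against this folder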
[ 2328.452606] env[61570]: DEBUG oslo.service.loopingcall [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2328.452606] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2328.452823] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5860ea5e-45f6-4d4d-bd30-b913b6186348 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.476338] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2328.476338] env[61570]: value = "task-4891491" [ 2328.476338] env[61570]: _type = "Task" [ 2328.476338] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.486136] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891491, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.698052] env[61570]: DEBUG nova.network.neutron [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Updated VIF entry in instance network info cache for port bb1c5e19-53aa-483d-9a22-00c85a69b0e7. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2328.698465] env[61570]: DEBUG nova.network.neutron [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Updating instance_info_cache with network_info: [{"id": "bb1c5e19-53aa-483d-9a22-00c85a69b0e7", "address": "fa:16:3e:02:20:42", "network": {"id": "a9d175f4-2177-4efc-a7af-2fc159ec11f7", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-222280056-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0af458d2a48407f8609850f08fc65c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb1c5e19-53", "ovs_interfaceid": "bb1c5e19-53aa-483d-9a22-00c85a69b0e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2328.708866] env[61570]: DEBUG oslo_concurrency.lockutils [req-3c82ccfe-834d-4ad7-a4d5-6cefb15f4e92 req-f6eed9cc-f550-46be-8073-9c0d95901884 service nova] Releasing lock "refresh_cache-977a0d78-1e10-4a67-8c90-54c385485622" {{(pid=61570) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2328.986746] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891491, 'name': CreateVM_Task, 'duration_secs': 0.29974} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.986942] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2328.987558] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2328.987728] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2328.988084] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2328.988346] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61f0981d-25ac-4772-95ba-e5c5b5b1462f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.992822] env[61570]: DEBUG oslo_vmware.api [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Waiting for the task: (returnval){ [ 2328.992822] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5286904e-ff13-63e6-69b2-fb16f558e4d1" [ 2328.992822] env[61570]: _type = "Task" [ 2328.992822] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.002250] env[61570]: DEBUG oslo_vmware.api [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]5286904e-ff13-63e6-69b2-fb16f558e4d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.503982] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2329.504345] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2329.504403] env[61570]: DEBUG oslo_concurrency.lockutils [None req-c07f8e93-40b5-41fe-8b70-ea4976ee769c tempest-AttachVolumeNegativeTest-1791556996 tempest-AttachVolumeNegativeTest-1791556996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2332.748283] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2341.753395] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2341.753812] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Cleaning up deleted instances with incomplete migration {{(pid=61570) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11345}} [ 2343.200070] env[61570]: DEBUG oslo_concurrency.lockutils [None req-111618c3-1095-4b64-8b21-b90a01f75131 tempest-ServerPasswordTestJSON-2045402662 tempest-ServerPasswordTestJSON-2045402662-project-member] Acquiring lock "9a6bb63e-ea38-476e-8597-aba1d55ed5f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2343.753382] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.173671] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._sync_power_states {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.198010] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Getting list of instances from cluster (obj){ [ 2344.198010] env[61570]: value = "domain-c8" [ 2344.198010] env[61570]: _type = "ClusterComputeResource" [ 
2344.198010] env[61570]: } {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2344.199430] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e56bed-24f1-4e73-8baa-f4272320453e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.217568] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Got total of 10 instances {{(pid=61570) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2344.217928] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 15cc451d-9419-4952-83a4-4fde3d237f8b {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.217928] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 8133bec0-155c-4ffe-b972-adabe3b281dc {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.218213] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.218282] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid de0d43a3-122f-43de-9992-e30d2954408f {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.218583] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 2fde6764-2bfe-4634-b371-91bc1a5e38e4 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.218742] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 345f407a-879d-4c87-810a-fbad1b1d4c07 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.218895] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid ece33fed-2e33-4876-83b5-7618968faa12 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.219061] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 9a6bb63e-ea38-476e-8597-aba1d55ed5f5 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.219626] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 4c56b397-97b5-4210-9130-0a8769f8f19c {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.219626] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Triggering sync for uuid 977a0d78-1e10-4a67-8c90-54c385485622 {{(pid=61570) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10417}} [ 2344.219775] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "15cc451d-9419-4952-83a4-4fde3d237f8b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.219942] 
env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "8133bec0-155c-4ffe-b972-adabe3b281dc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.220171] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "7cbe40c4-0c9d-4ce7-bcb0-0481a170398c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.220401] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "de0d43a3-122f-43de-9992-e30d2954408f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.220626] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "2fde6764-2bfe-4634-b371-91bc1a5e38e4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.220821] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "345f407a-879d-4c87-810a-fbad1b1d4c07" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.221024] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "ece33fed-2e33-4876-83b5-7618968faa12" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.221227] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "9a6bb63e-ea38-476e-8597-aba1d55ed5f5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.221417] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "4c56b397-97b5-4210-9130-0a8769f8f19c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.221613] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "977a0d78-1e10-4a67-8c90-54c385485622" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2374.146344] env[61570]: WARNING oslo_vmware.rw_handles [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 
tempest-ServerMetadataTestJSON-1765319857-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles response.begin() [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2374.146344] env[61570]: ERROR oslo_vmware.rw_handles [ 2374.146344] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Downloaded image file data 64ba497f-0d92-47de-bece-8112101951ad to vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2374.148665] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Caching image {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2374.148932] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Copying Virtual Disk [datastore2] vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk to [datastore2] vmware_temp/8638b956-b04a-4c02-8ce4-f594fe614d0c/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk {{(pid=61570) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2374.149265] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c83ef50-d5d4-47f5-9aea-4969449c160c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.160054] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Waiting for the task: (returnval){ [ 2374.160054] env[61570]: value = "task-4891492" [ 2374.160054] env[61570]: _type = "Task" [ 2374.160054] env[61570]: } to complete. 
{{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.168301] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Task: {'id': task-4891492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.668511] env[61570]: DEBUG oslo_vmware.exceptions [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Fault InvalidArgument not matched. {{(pid=61570) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2374.668798] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2374.669367] env[61570]: ERROR nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2374.669367] env[61570]: Faults: ['InvalidArgument'] [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Traceback (most recent call last): [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] yield resources [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self.driver.spawn(context, instance, image_meta, [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self._fetch_image_if_missing(context, vi) [ 2374.669367] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] image_cache(vi, tmp_image_ds_loc) [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 
15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] vm_util.copy_virtual_disk( [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] session._wait_for_task(vmdk_copy_task) [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] return self.wait_for_task(task_ref) [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] return evt.wait() [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] result = hub.switch() [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2374.669728] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] return self.greenlet.switch() [ 2374.670078] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2374.670078] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self.f(*self.args, **self.kw) [ 2374.670078] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2374.670078] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] raise exceptions.translate_fault(task_info.error) [ 2374.670078] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2374.670078] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Faults: ['InvalidArgument'] [ 2374.670078] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] [ 2374.670078] env[61570]: INFO nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Terminating instance [ 2374.671297] env[61570]: DEBUG oslo_concurrency.lockutils [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2374.671502] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2374.671742] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46a92679-adc0-4422-bdcb-7c2db5b54826 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.674155] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2374.674346] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2374.675080] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238d801a-eaa6-4d4e-b5b7-f5e05017a055 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.682055] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Unregistering the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2374.682307] env[61570]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12e6d82d-303b-450a-8866-10d0128b9c2b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.684631] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2374.684810] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61570) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2374.685784] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac44b2b6-2c34-43da-94af-9381b7597bc5 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.691260] env[61570]: DEBUG oslo_vmware.api [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Waiting for the task: (returnval){ [ 2374.691260] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52eccfa9-670d-e050-d299-c00398ccf625" [ 2374.691260] env[61570]: _type = "Task" [ 2374.691260] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.698807] env[61570]: DEBUG oslo_vmware.api [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]52eccfa9-670d-e050-d299-c00398ccf625, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.748661] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Unregistered the VM {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2374.748939] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Deleting contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2374.749116] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Deleting the datastore file [datastore2] 15cc451d-9419-4952-83a4-4fde3d237f8b {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2374.749384] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9afc5c24-b072-440a-a7c8-c9db7eb6ae1b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.756094] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Waiting for the task: (returnval){ [ 2374.756094] env[61570]: value = "task-4891494" [ 2374.756094] env[61570]: _type = "Task" [ 2374.756094] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2374.764602] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Task: {'id': task-4891494, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2375.202371] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Preparing fetch location {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2375.202371] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Creating directory with path [datastore2] vmware_temp/3658a40d-8f26-4aac-b554-aff4857ad341/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2375.202645] env[61570]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-116099f1-4545-4577-ae71-ffff9fe11042 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.215566] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Created directory with path [datastore2] vmware_temp/3658a40d-8f26-4aac-b554-aff4857ad341/64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2375.215566] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Fetch image to [datastore2] vmware_temp/3658a40d-8f26-4aac-b554-aff4857ad341/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2375.215566] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to [datastore2] vmware_temp/3658a40d-8f26-4aac-b554-aff4857ad341/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk on the data store datastore2 {{(pid=61570) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2375.216837] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721a6317-b749-4535-8c18-db1fc688cdaf {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.224773] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74dd6012-6a69-459f-be8d-8afa6fc8030a {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.243198] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13995fb-cc8a-45b8-8135-b072c36ee4f6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.282343] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8c853907-70d6-4a6f-bec3-193ab7095d81 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.290472] env[61570]: DEBUG oslo_vmware.api [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Task: {'id': task-4891494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073801} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2375.292047] env[61570]: DEBUG nova.virt.vmwareapi.ds_util [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Deleted the datastore file {{(pid=61570) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2375.292246] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Deleted contents of the VM from datastore datastore2 {{(pid=61570) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2375.292421] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2375.292592] env[61570]: INFO nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Took 0.62 seconds to destroy the instance on the hypervisor. 
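The failed spawn and the teardown above both follow the oslo.vmware task pattern that recurs throughout this log: a datastore operation (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is invoked through the API session, wait_for_task() polls its progress (the "progress is 0%" entries), and a task that ends in error is re-raised as a VimFaultException carrying the raw vSphere fault names, here 'InvalidArgument' for the bad fileType. A minimal sketch of that pattern, assuming an already-configured oslo.vmware VMwareAPISession named `session` and a datacenter reference `dc_ref` (both illustrative placeholders, not values taken from this log):

    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, dc_ref, ds_path):
        # Datastore file operations go through the vCenter FileManager managed object.
        file_manager = session.vim.service_content.fileManager
        # invoke_api() issues DeleteDatastoreFile_Task and returns a task reference,
        # comparable to task-4891494 in the log above.
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=str(ds_path), datacenter=dc_ref)
        try:
            # wait_for_task() polls the task until it completes and raises if the
            # task finishes in an error state instead of success.
            session.wait_for_task(task)
        except vexc.VimFaultException as err:
            # err.fault_list holds the fault names, e.g. ['InvalidArgument'].
            print('Datastore delete failed: %s (faults: %s)' % (err, err.fault_list))
            raise

This is only an illustration of the call/wait/translate-fault flow visible in the surrounding entries, not a reproduction of the Nova code paths (ds_util.file_delete, vm_util.copy_virtual_disk) referenced in the tracebacks.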
[ 2375.294657] env[61570]: DEBUG nova.compute.claims [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Aborting claim: {{(pid=61570) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2375.294817] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2375.295402] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.298467] env[61570]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2c3681ae-72b3-47c4-ab80-5f335e2dae35 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.323331] env[61570]: DEBUG nova.virt.vmwareapi.images [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Downloading image file data 64ba497f-0d92-47de-bece-8112101951ad to the data store datastore2 {{(pid=61570) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2375.376831] env[61570]: DEBUG nova.scheduler.client.report [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Refreshing inventories for resource provider 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2375.392608] env[61570]: DEBUG nova.scheduler.client.report [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Updating ProviderTree inventory for provider 829dc000-b508-440d-ae59-f7cfbca90113 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2375.394509] env[61570]: DEBUG nova.compute.provider_tree [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Updating inventory in ProviderTree for provider 829dc000-b508-440d-ae59-f7cfbca90113 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2375.409823] env[61570]: DEBUG nova.scheduler.client.report [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Refreshing aggregate associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, aggregates: None {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2375.431162] env[61570]: DEBUG nova.scheduler.client.report [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Refreshing trait associations for resource provider 829dc000-b508-440d-ae59-f7cfbca90113, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61570) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2375.516481] env[61570]: DEBUG oslo_vmware.rw_handles [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3658a40d-8f26-4aac-b554-aff4857ad341/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61570) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2375.579021] env[61570]: DEBUG oslo_vmware.rw_handles [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Completed reading data from the image iterator. {{(pid=61570) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2375.579021] env[61570]: DEBUG oslo_vmware.rw_handles [None req-9f288b7d-ce10-4783-aca9-fb23c0eeb3e8 tempest-ImagesOneServerTestJSON-1075599893 tempest-ImagesOneServerTestJSON-1075599893-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3658a40d-8f26-4aac-b554-aff4857ad341/64ba497f-0d92-47de-bece-8112101951ad/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61570) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2375.650087] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01dfe707-2059-4fca-848f-c664ea6ae055 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.658653] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877c66bf-a7ca-4ac5-adee-aa7ff61d6082 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.688547] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec15c48e-b853-4825-b958-3c2c8bd83038 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.696997] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8cb7ea-9e11-4333-a919-209d887b7362 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.711292] env[61570]: DEBUG nova.compute.provider_tree [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2375.725567] env[61570]: DEBUG nova.scheduler.client.report [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2375.743034] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.448s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.743612] env[61570]: ERROR nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2375.743612] env[61570]: Faults: ['InvalidArgument'] [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Traceback (most recent call last): [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2375.743612] env[61570]: 
ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self.driver.spawn(context, instance, image_meta, [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self._fetch_image_if_missing(context, vi) [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] image_cache(vi, tmp_image_ds_loc) [ 2375.743612] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] vm_util.copy_virtual_disk( [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] session._wait_for_task(vmdk_copy_task) [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] return self.wait_for_task(task_ref) [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] return evt.wait() [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] result = hub.switch() [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] return self.greenlet.switch() [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2375.744177] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] self.f(*self.args, **self.kw) [ 2375.744708] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2375.744708] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] raise exceptions.translate_fault(task_info.error) [ 2375.744708] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2375.744708] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Faults: ['InvalidArgument'] [ 2375.744708] env[61570]: ERROR nova.compute.manager [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] [ 2375.744708] env[61570]: DEBUG nova.compute.utils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] VimFaultException {{(pid=61570) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2375.745907] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Build of instance 15cc451d-9419-4952-83a4-4fde3d237f8b was re-scheduled: A specified parameter was not correct: fileType [ 2375.745907] env[61570]: Faults: ['InvalidArgument'] {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2375.746293] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Unplugging VIFs for instance {{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2375.746464] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61570) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2375.746634] env[61570]: DEBUG nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2375.746806] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2376.194495] env[61570]: DEBUG nova.network.neutron [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2376.209070] env[61570]: INFO nova.compute.manager [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Took 0.46 seconds to deallocate network for instance. [ 2376.321191] env[61570]: INFO nova.scheduler.client.report [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Deleted allocations for instance 15cc451d-9419-4952-83a4-4fde3d237f8b [ 2376.365883] env[61570]: DEBUG oslo_concurrency.lockutils [None req-405100c6-4562-4e64-8c9a-98823c10ec4f tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.058s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2376.366109] env[61570]: DEBUG oslo_concurrency.lockutils [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.080s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2376.366344] env[61570]: DEBUG oslo_concurrency.lockutils [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Acquiring lock "15cc451d-9419-4952-83a4-4fde3d237f8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2376.366558] env[61570]: DEBUG oslo_concurrency.lockutils [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2376.366731] env[61570]: DEBUG oslo_concurrency.lockutils [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2376.368886] env[61570]: INFO nova.compute.manager [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Terminating instance [ 2376.370886] env[61570]: DEBUG nova.compute.manager [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Start destroying the instance on the hypervisor. {{(pid=61570) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2376.371090] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Destroying instance {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2376.371684] env[61570]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89677dfc-7bd4-4ef3-9ba7-5855c2a5341c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.381060] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05088362-0a74-45d9-bb6b-c488d80b21cd {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.416078] env[61570]: WARNING nova.virt.vmwareapi.vmops [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 15cc451d-9419-4952-83a4-4fde3d237f8b could not be found. [ 2376.416143] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Instance destroyed {{(pid=61570) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2376.416300] env[61570]: INFO nova.compute.manager [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2376.416547] env[61570]: DEBUG oslo.service.loopingcall [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2376.416795] env[61570]: DEBUG nova.compute.manager [-] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Deallocating network for instance {{(pid=61570) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2376.416889] env[61570]: DEBUG nova.network.neutron [-] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] deallocate_for_instance() {{(pid=61570) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2376.441532] env[61570]: DEBUG nova.network.neutron [-] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Updating instance_info_cache with network_info: [] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2376.452102] env[61570]: INFO nova.compute.manager [-] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] Took 0.04 seconds to deallocate network for instance. [ 2376.559584] env[61570]: DEBUG oslo_concurrency.lockutils [None req-871ac18a-48d0-4521-ba2e-26d21cb42d72 tempest-ServerMetadataTestJSON-1765319857 tempest-ServerMetadataTestJSON-1765319857-project-member] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2376.560448] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 32.341s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2376.560638] env[61570]: INFO nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 15cc451d-9419-4952-83a4-4fde3d237f8b] During sync_power_state the instance has a pending task (deleting). Skip. [ 2376.560817] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "15cc451d-9419-4952-83a4-4fde3d237f8b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2378.753170] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2378.753170] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61570) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10626}} [ 2380.705737] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquiring lock "d4d94b79-c406-446e-86ee-7945701e3bfe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.705737] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Lock "d4d94b79-c406-446e-86ee-7945701e3bfe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.717258] env[61570]: DEBUG nova.compute.manager [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Starting instance... {{(pid=61570) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2380.788024] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.788024] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.788024] env[61570]: INFO nova.compute.claims [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2381.003796] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8197ad-12fa-4202-91b0-364c54fd36b3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.012619] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159f8cec-d190-437d-a3ad-924af5667868 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.070642] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bcb111-be21-491b-8a84-083795c9ff9c {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.083992] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-71bbf069-5183-4601-9f45-64876052738d {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.101646] env[61570]: DEBUG nova.compute.provider_tree [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2381.113440] env[61570]: DEBUG nova.scheduler.client.report [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2381.129086] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2381.129590] env[61570]: DEBUG nova.compute.manager [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Start building networks asynchronously for instance. {{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2381.172646] env[61570]: DEBUG nova.compute.utils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Using /dev/sd instead of None {{(pid=61570) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2381.174330] env[61570]: DEBUG nova.compute.manager [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Allocating IP information in the background. {{(pid=61570) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2381.174330] env[61570]: DEBUG nova.network.neutron [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] allocate_for_instance() {{(pid=61570) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2381.186034] env[61570]: DEBUG nova.compute.manager [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Start building block device mappings for instance. 
{{(pid=61570) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2381.233594] env[61570]: DEBUG nova.policy [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c22b225662c43ed846cf9cd9e116077', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fea3cb177a554b6babad1112137743db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61570) authorize /opt/stack/nova/nova/policy.py:203}} [ 2381.274555] env[61570]: DEBUG nova.compute.manager [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Start spawning the instance on the hypervisor. {{(pid=61570) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2381.300956] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-12-01T10:43:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-12-01T10:42:44Z,direct_url=,disk_format='vmdk',id=64ba497f-0d92-47de-bece-8112101951ad,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9026853fead54dcba8cbc4240690b9df',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-12-01T10:42:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2381.301228] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Flavor limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2381.301387] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Image limits 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2381.301571] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Flavor pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2381.301716] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Image pref 0:0:0 {{(pid=61570) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 2381.301861] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61570) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2381.302077] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2381.302240] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2381.302406] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Got 1 possible topologies {{(pid=61570) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2381.302567] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2381.302733] env[61570]: DEBUG nova.virt.hardware [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61570) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2381.303582] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffe5075-d7c2-49e4-a116-deee4d434c2e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.312242] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537bf0b1-c552-405c-b30f-7a2cef45ea6f {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.613512] env[61570]: DEBUG nova.network.neutron [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Successfully created port: 045980fe-0127-4e8c-a6ac-799c4515a052 {{(pid=61570) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2381.753226] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2381.753578] env[61570]: DEBUG oslo_service.periodic_task [None 
req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2381.753619] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager.update_available_resource {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2381.767659] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2381.767877] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2381.768056] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2381.768225] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61570) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2381.769501] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1decea20-10e3-40a2-b754-e350e3ff84ca {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.778467] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5676091-8b3e-4fc2-a2b2-8ecb201d0b10 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.794936] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0d2ebf-965c-4e06-8d1b-b66c1a83540b {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.802298] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3acf8d-fc44-4f35-b76f-f48b9a524927 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.836510] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180596MB free_disk=96GB free_vcpus=48 pci_devices=None {{(pid=61570) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2381.836685] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2381.836937] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2381.918800] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 8133bec0-155c-4ffe-b972-adabe3b281dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919013] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919086] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance de0d43a3-122f-43de-9992-e30d2954408f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919212] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 2fde6764-2bfe-4634-b371-91bc1a5e38e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919332] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 345f407a-879d-4c87-810a-fbad1b1d4c07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919448] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance ece33fed-2e33-4876-83b5-7618968faa12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919564] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 9a6bb63e-ea38-476e-8597-aba1d55ed5f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919678] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 4c56b397-97b5-4210-9130-0a8769f8f19c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919791] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance 977a0d78-1e10-4a67-8c90-54c385485622 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.919990] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Instance d4d94b79-c406-446e-86ee-7945701e3bfe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61570) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1710}} [ 2381.920094] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2381.920250] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] stats={'failed_builds': '90', 'num_instances': '10', 'num_vm_building': '10', 'num_task_deleting': '6', 'num_os_type_None': '10', 'num_proj_7e1c9d890ee242c9a13e7ebb409c9fb4': '1', 'io_workload': '10', 'num_proj_35794e305c9f4380b941db6b873ec99c': '1', 'num_proj_e11c8be84d5d443f9d287ef7c345fe9a': '1', 'num_proj_743eeefd02e04e63850742fc5590125f': '1', 'num_task_spawning': '4', 'num_proj_d1c11dd2b9174b0a91c5f74e9410913b': '2', 'num_proj_9879c18ea258468bad18ac62d0610b69': '1', 'num_proj_ad9eb62e68d84940a9f2ee2acbe60221': '1', 'num_proj_e0af458d2a48407f8609850f08fc65c1': '1', 'num_proj_fea3cb177a554b6babad1112137743db': '1'} {{(pid=61570) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2382.056313] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249b58fd-f7dd-4949-9071-0b371c516de6 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.064744] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616fbd2a-093d-47ad-b365-95cbe28806d1 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.096100] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb01a86-f110-4781-b648-fec60daf3f13 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.104888] env[61570]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-329f21bc-24af-4906-b5cd-08188005bd1e {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.119691] env[61570]: DEBUG nova.compute.provider_tree [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed in ProviderTree for provider: 829dc000-b508-440d-ae59-f7cfbca90113 {{(pid=61570) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2382.131748] env[61570]: DEBUG nova.scheduler.client.report [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Inventory has not changed for provider 829dc000-b508-440d-ae59-f7cfbca90113 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 96, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61570) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2382.147696] env[61570]: DEBUG nova.compute.resource_tracker [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61570) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2382.147696] env[61570]: DEBUG oslo_concurrency.lockutils [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.311s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2382.197330] env[61570]: DEBUG nova.compute.manager [req-a929515d-d65a-4c4a-8da2-fd371313b979 req-fc7cabb0-4dbc-4fda-8b7f-dd11b2c9ba40 service nova] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Received event network-vif-plugged-045980fe-0127-4e8c-a6ac-799c4515a052 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2382.197575] env[61570]: DEBUG oslo_concurrency.lockutils [req-a929515d-d65a-4c4a-8da2-fd371313b979 req-fc7cabb0-4dbc-4fda-8b7f-dd11b2c9ba40 service nova] Acquiring lock "d4d94b79-c406-446e-86ee-7945701e3bfe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.197799] env[61570]: DEBUG oslo_concurrency.lockutils [req-a929515d-d65a-4c4a-8da2-fd371313b979 req-fc7cabb0-4dbc-4fda-8b7f-dd11b2c9ba40 service nova] Lock "d4d94b79-c406-446e-86ee-7945701e3bfe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2382.198139] env[61570]: DEBUG oslo_concurrency.lockutils [req-a929515d-d65a-4c4a-8da2-fd371313b979 req-fc7cabb0-4dbc-4fda-8b7f-dd11b2c9ba40 service nova] Lock "d4d94b79-c406-446e-86ee-7945701e3bfe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61570) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2382.198404] env[61570]: DEBUG nova.compute.manager [req-a929515d-d65a-4c4a-8da2-fd371313b979 req-fc7cabb0-4dbc-4fda-8b7f-dd11b2c9ba40 service nova] [instance: 
d4d94b79-c406-446e-86ee-7945701e3bfe] No waiting events found dispatching network-vif-plugged-045980fe-0127-4e8c-a6ac-799c4515a052 {{(pid=61570) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2382.198579] env[61570]: WARNING nova.compute.manager [req-a929515d-d65a-4c4a-8da2-fd371313b979 req-fc7cabb0-4dbc-4fda-8b7f-dd11b2c9ba40 service nova] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Received unexpected event network-vif-plugged-045980fe-0127-4e8c-a6ac-799c4515a052 for instance with vm_state building and task_state spawning. [ 2382.285608] env[61570]: DEBUG nova.network.neutron [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Successfully updated port: 045980fe-0127-4e8c-a6ac-799c4515a052 {{(pid=61570) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2382.297367] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquiring lock "refresh_cache-d4d94b79-c406-446e-86ee-7945701e3bfe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2382.297559] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquired lock "refresh_cache-d4d94b79-c406-446e-86ee-7945701e3bfe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2382.297717] env[61570]: DEBUG nova.network.neutron [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Building network info cache for instance {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2382.342157] env[61570]: DEBUG nova.network.neutron [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Instance cache missing network info. 
{{(pid=61570) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2382.547180] env[61570]: DEBUG nova.network.neutron [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Updating instance_info_cache with network_info: [{"id": "045980fe-0127-4e8c-a6ac-799c4515a052", "address": "fa:16:3e:cc:33:18", "network": {"id": "fb45dee7-7d6d-4b36-afc7-41f0b1083206", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-52591089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fea3cb177a554b6babad1112137743db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap045980fe-01", "ovs_interfaceid": "045980fe-0127-4e8c-a6ac-799c4515a052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.560564] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Releasing lock "refresh_cache-d4d94b79-c406-446e-86ee-7945701e3bfe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2382.560873] env[61570]: DEBUG nova.compute.manager [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Instance network_info: |[{"id": "045980fe-0127-4e8c-a6ac-799c4515a052", "address": "fa:16:3e:cc:33:18", "network": {"id": "fb45dee7-7d6d-4b36-afc7-41f0b1083206", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-52591089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fea3cb177a554b6babad1112137743db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap045980fe-01", "ovs_interfaceid": "045980fe-0127-4e8c-a6ac-799c4515a052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61570) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2382.561316] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:33:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '045980fe-0127-4e8c-a6ac-799c4515a052', 'vif_model': 'vmxnet3'}] {{(pid=61570) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2382.569704] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Creating folder: Project (fea3cb177a554b6babad1112137743db). Parent ref: group-v953072. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2382.570326] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48109bc8-1a61-4766-bd1b-8ae8f1088ba4 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.582248] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Created folder: Project (fea3cb177a554b6babad1112137743db) in parent group-v953072. [ 2382.584835] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Creating folder: Instances. Parent ref: group-v953201. {{(pid=61570) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2382.584835] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7ca2561-0256-4107-a952-e2d2c5952492 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.594256] env[61570]: INFO nova.virt.vmwareapi.vm_util [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Created folder: Instances in parent group-v953201. [ 2382.594627] env[61570]: DEBUG oslo.service.loopingcall [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61570) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2382.594886] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Creating VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2382.595256] env[61570]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f05cb072-4d2f-489e-a02a-e34483bf4c88 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.615670] env[61570]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2382.615670] env[61570]: value = "task-4891497" [ 2382.615670] env[61570]: _type = "Task" [ 2382.615670] env[61570]: } to complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.624511] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891497, 'name': CreateVM_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2383.125998] env[61570]: DEBUG oslo_vmware.api [-] Task: {'id': task-4891497, 'name': CreateVM_Task, 'duration_secs': 0.322881} completed successfully. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2383.126375] env[61570]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Created VM on the ESX host {{(pid=61570) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2383.126844] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2383.127020] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2383.127397] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2383.127656] env[61570]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc9c3f4b-454d-454b-bd81-e703a9d496c3 {{(pid=61570) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.132212] env[61570]: DEBUG oslo_vmware.api [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Waiting for the task: (returnval){ [ 2383.132212] env[61570]: value = "session[520ea153-1f07-f1c7-8903-e00561bb1f4b]529b3e18-5a3d-cef0-036d-a05411b6a58f" [ 2383.132212] env[61570]: _type = "Task" [ 2383.132212] env[61570]: } to 
complete. {{(pid=61570) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2383.139888] env[61570]: DEBUG oslo_vmware.api [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Task: {'id': session[520ea153-1f07-f1c7-8903-e00561bb1f4b]529b3e18-5a3d-cef0-036d-a05411b6a58f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61570) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2383.644352] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2383.644414] env[61570]: DEBUG nova.virt.vmwareapi.vmops [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Processing image 64ba497f-0d92-47de-bece-8112101951ad {{(pid=61570) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2383.644655] env[61570]: DEBUG oslo_concurrency.lockutils [None req-753e4477-6ab2-4565-afd7-9a2d7fa69d8e tempest-AttachVolumeShelveTestJSON-1221667233 tempest-AttachVolumeShelveTestJSON-1221667233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/64ba497f-0d92-47de-bece-8112101951ad/64ba497f-0d92-47de-bece-8112101951ad.vmdk" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2384.148027] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.148432] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.228526] env[61570]: DEBUG nova.compute.manager [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Received event network-changed-045980fe-0127-4e8c-a6ac-799c4515a052 {{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11210}} [ 2384.228681] env[61570]: DEBUG nova.compute.manager [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Refreshing instance network info cache due to event network-changed-045980fe-0127-4e8c-a6ac-799c4515a052. 
{{(pid=61570) external_instance_event /opt/stack/nova/nova/compute/manager.py:11215}} [ 2384.228890] env[61570]: DEBUG oslo_concurrency.lockutils [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] Acquiring lock "refresh_cache-d4d94b79-c406-446e-86ee-7945701e3bfe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2384.229078] env[61570]: DEBUG oslo_concurrency.lockutils [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] Acquired lock "refresh_cache-d4d94b79-c406-446e-86ee-7945701e3bfe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2384.229294] env[61570]: DEBUG nova.network.neutron [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Refreshing network info cache for port 045980fe-0127-4e8c-a6ac-799c4515a052 {{(pid=61570) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2384.508948] env[61570]: DEBUG nova.network.neutron [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Updated VIF entry in instance network info cache for port 045980fe-0127-4e8c-a6ac-799c4515a052. {{(pid=61570) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2384.509348] env[61570]: DEBUG nova.network.neutron [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Updating instance_info_cache with network_info: [{"id": "045980fe-0127-4e8c-a6ac-799c4515a052", "address": "fa:16:3e:cc:33:18", "network": {"id": "fb45dee7-7d6d-4b36-afc7-41f0b1083206", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-52591089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fea3cb177a554b6babad1112137743db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap045980fe-01", "ovs_interfaceid": "045980fe-0127-4e8c-a6ac-799c4515a052", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61570) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2384.519578] env[61570]: DEBUG oslo_concurrency.lockutils [req-f767ab1c-cdea-4701-9a4b-9feac28ae699 req-b5e9f537-2239-4090-a078-114080fafb84 service nova] Releasing lock "refresh_cache-d4d94b79-c406-446e-86ee-7945701e3bfe" {{(pid=61570) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2384.753957] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61570) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.754163] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Starting heal instance info cache {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10007}} [ 2384.754287] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Rebuilding the list of instances to heal {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10011}} [ 2384.776805] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 8133bec0-155c-4ffe-b972-adabe3b281dc] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.777008] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 7cbe40c4-0c9d-4ce7-bcb0-0481a170398c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.777216] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: de0d43a3-122f-43de-9992-e30d2954408f] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.777389] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 2fde6764-2bfe-4634-b371-91bc1a5e38e4] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.777526] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 345f407a-879d-4c87-810a-fbad1b1d4c07] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.777648] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: ece33fed-2e33-4876-83b5-7618968faa12] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.777768] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 9a6bb63e-ea38-476e-8597-aba1d55ed5f5] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.777885] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 4c56b397-97b5-4210-9130-0a8769f8f19c] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.778009] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: 977a0d78-1e10-4a67-8c90-54c385485622] Skipping network cache update for instance because it is Building. 
{{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.778134] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] [instance: d4d94b79-c406-446e-86ee-7945701e3bfe] Skipping network cache update for instance because it is Building. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10020}} [ 2384.778256] env[61570]: DEBUG nova.compute.manager [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Didn't find any instances for network info cache update. {{(pid=61570) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10093}} [ 2385.753609] env[61570]: DEBUG oslo_service.periodic_task [None req-58658313-4c4b-43f9-b29a-75c47e23604f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61570) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
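Note on the resource audit entries above: the final resource view (used_ram=1792MB, used_disk=10GB, used_vcpus=10) is consistent with the ten per-instance placement allocations of {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}} listed earlier plus the 512 MB reserved in the MEMORY_MB inventory: 10 x 128 MB + 512 MB = 1792 MB, 10 x 1 GB = 10 GB, and 10 x 1 VCPU = 10 VCPU.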
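The repeated lock traces in this section come from oslo.concurrency: the "Acquiring lock ... by ..." / "acquired ... :: waited" / "released ... :: held" lines are emitted by the lockutils.synchronized decorator (lockutils.py "inner"), while the "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the refresh_cache-<uuid> work come from the lockutils.lock context manager. A minimal illustrative sketch of both patterns follows; the function bodies and call sites are hypothetical, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: produces the "Acquiring lock ... by ..." /
    # "Lock ... acquired ... :: waited" / "Lock ... released ... :: held"
    # DEBUG lines seen for the "compute_resources" lock.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # hypothetical body: audit host resources while holding the lock

    # Context-manager form: produces the "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" DEBUG lines seen for the refresh_cache-<uuid> locks.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # hypothetical body: rebuild the instance network info cache

    update_available_resource()
    refresh_network_cache('d4d94b79-c406-446e-86ee-7945701e3bfe')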
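The "Running periodic task ComputeManager._poll_volume_usage", "..._update_available_resource", "..._heal_instance_info_cache" and similar entries are produced by oslo.service's periodic task runner. A minimal sketch of how such tasks are wired, assuming oslo.config and oslo.service are available; ManagerSketch and the task bodies are illustrative placeholders, not the real ComputeManager:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ManagerSketch(periodic_task.PeriodicTasks):
        # Each decorated method is registered as a periodic task; the runner
        # logs "Running periodic task ManagerSketch.<method>" before calling it.
        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # placeholder body

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            pass  # placeholder body

    manager = ManagerSketch(cfg.CONF)
    # In a real service a timer drives this call repeatedly; each invocation
    # runs whichever tasks are due and emits the DEBUG lines above.
    manager.run_periodic_tasks(context=None)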
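The "Invoking <ManagedObject>.<Method> with opID=oslo.vmware-..." and "Task: {'id': task-..., 'name': CreateVM_Task} progress is N%" entries come from oslo.vmware's session layer, which the vmwareapi driver uses for every vCenter call. A rough sketch of that call pattern, assuming oslo.vmware is installed; the endpoint, credentials and object arguments are placeholders, and the calls only succeed against a reachable vCenter:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials (assumptions for this sketch).
    session = api.VMwareAPISession(
        host='vc.example.test',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5,
    )

    # Every vSphere call goes through invoke_api(), which logs
    # "Invoking <object>.<method> with opID=oslo.vmware-<uuid>".
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'VirtualMachine', 100)

    # Long-running operations (e.g. Folder.CreateVM_Task) return a Task
    # reference; wait_for_task() polls it and logs the
    # "Task: {...} progress is N%" lines until it completes.
    # task_info = session.wait_for_task(task_ref)

    session.logout()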