[ 620.821451] env[61473]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61473) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 620.821792] env[61473]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61473) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 620.821845] env[61473]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61473) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 620.822201] env[61473]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 620.912547] env[61473]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61473) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 620.922410] env[61473]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61473) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 621.059031] env[61473]: INFO nova.virt.driver [None req-2aec4d7d-53d5-4d7e-a232-02e99e08bb2e None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 621.129611] env[61473]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 621.129775] env[61473]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 621.129872] env[61473]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61473) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 624.180049] env[61473]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-a50322e2-5ab5-4ef0-8ed3-0727ec868e00 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.195888] env[61473]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61473) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 624.196065] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-8e08ac47-6949-4da1-8b1e-7f2dc99ab548 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.228959] env[61473]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 4a076.
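The entries above show two startup steps: os_vif building its VIF plugin registry, and oslo.vmware opening a vCenter session (serialized behind "oslo_vmware_api_lock"). A minimal sketch of the same two library calls, assuming placeholder vCenter host and credentials (none of the values below come from this deployment):

    # Sketch: the two startup steps visible in the log, reduced to library
    # calls. Host and credentials are placeholders.
    import os_vif
    from oslo_vmware import api

    # Loads every registered VIF plugin once per process
    # (linux_bridge, noop, ovs in this log).
    os_vif.initialize()

    # Opens and authenticates a vSphere API session; oslo.vmware guards
    # session creation with the "oslo_vmware_api_lock" seen above.
    session = api.VMwareAPISession(
        'vc1.example.com',   # host (placeholder)
        'administrator',     # server_username (placeholder)
        'secret',            # server_password (placeholder)
        10,                  # api_retry_count
        0.5)                 # task_poll_interval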
[ 624.229114] env[61473]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.099s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 624.229679] env[61473]: INFO nova.virt.vmwareapi.driver [None req-2aec4d7d-53d5-4d7e-a232-02e99e08bb2e None None] VMware vCenter version: 7.0.3
[ 624.233343] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4173731a-cbe0-4a0f-b364-334c87793711 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.251108] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1be32d-2260-495d-83b5-0e32abf092d6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.256867] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5207147-b1c1-444f-bb40-8c2acc7fc585 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.263409] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cac264-b41b-4e05-8a65-ff43d5b98565 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.276510] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6cf9fa6-1d44-4ac9-ae89-ed2777d2cc51 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.282311] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edb2822-96a1-40e8-bc35-b5677b60b420 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.312748] env[61473]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-73b3b3f4-65d4-4ddc-99c6-62e582ee53aa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.317963] env[61473]: DEBUG nova.virt.vmwareapi.driver [None req-2aec4d7d-53d5-4d7e-a232-02e99e08bb2e None None] Extension org.openstack.compute already exists. {{(pid=61473) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 624.320640] env[61473]: INFO nova.compute.provider_config [None req-2aec4d7d-53d5-4d7e-a232-02e99e08bb2e None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
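The "VMware vCenter version: 7.0.3" line is read from the session's ServiceContent, and the repeated PropertyCollector.RetrievePropertiesEx invocations are what oslo.vmware's property-retrieval helpers issue under the hood. A hedged sketch, reusing the session object from the previous sketch; the 'Datacenter' type and property list are illustrative:

    # Sketch: reading the vCenter version and retrieving managed-object
    # properties the way the driver does. Assumes `session` from above.
    from oslo_vmware import vim_util

    about = session.vim.service_content.about
    print(about.version)  # e.g. "7.0.3", as logged by the driver

    # Issues a PropertyCollector.RetrievePropertiesEx call like the ones
    # in the log; object type and properties here are illustrative.
    result = vim_util.get_objects(session.vim, 'Datacenter', 100, ['name'])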
[ 624.339747] env[61473]: DEBUG nova.context [None req-2aec4d7d-53d5-4d7e-a232-02e99e08bb2e None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),ff21aaeb-1ffc-405a-83d7-a9bc15c0a8c7(cell1) {{(pid=61473) load_cells /opt/stack/nova/nova/context.py:464}}
[ 624.341610] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 624.341846] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 624.342524] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 624.342929] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Acquiring lock "ff21aaeb-1ffc-405a-83d7-a9bc15c0a8c7" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 624.343139] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Lock "ff21aaeb-1ffc-405a-83d7-a9bc15c0a8c7" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 624.344190] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Lock "ff21aaeb-1ffc-405a-83d7-a9bc15c0a8c7" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 624.364091] env[61473]: INFO dbcounter [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Registered counter for database nova_cell0
[ 624.372133] env[61473]: INFO dbcounter [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Registered counter for database nova_cell1
[ 624.375151] env[61473]: DEBUG oslo_db.sqlalchemy.engines [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61473) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 624.375740] env[61473]: DEBUG oslo_db.sqlalchemy.engines [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61473) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
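The per-cell lock lines above follow oslo.concurrency's standard acquire/wait/hold logging. A minimal sketch of the decorator that produces exactly those "Acquiring"/"acquired"/"released" DEBUG lines; 'cell-cache' is an illustrative lock name, not one nova uses:

    # Sketch: named locking with oslo.concurrency, as seen in the log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('cell-cache')
    def refresh_cell_cache():
        # Runs with the named lock held; lockutils logs the acquire time
        # waited and the hold duration on release, as in the entries above.
        pass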
[ 624.379711] env[61473]: DEBUG dbcounter [-] [61473] Writer thread running {{(pid=61473) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 624.381149] env[61473]: DEBUG dbcounter [-] [61473] Writer thread running {{(pid=61473) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 624.382769] env[61473]: ERROR nova.db.main.api [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 624.382769] env[61473]: result = function(*args, **kwargs)
[ 624.382769] env[61473]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 624.382769] env[61473]: return func(*args, **kwargs)
[ 624.382769] env[61473]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 624.382769] env[61473]: result = fn(*args, **kwargs)
[ 624.382769] env[61473]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 624.382769] env[61473]: return f(*args, **kwargs)
[ 624.382769] env[61473]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 624.382769] env[61473]: return db.service_get_minimum_version(context, binaries)
[ 624.382769] env[61473]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 624.382769] env[61473]: _check_db_access()
[ 624.382769] env[61473]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 624.382769] env[61473]: stacktrace = ''.join(traceback.format_stack())
[ 624.382769] env[61473]:
[ 624.383923] env[61473]: ERROR nova.db.main.api [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 624.383923] env[61473]: result = function(*args, **kwargs)
[ 624.383923] env[61473]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 624.383923] env[61473]: return func(*args, **kwargs)
[ 624.383923] env[61473]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 624.383923] env[61473]: result = fn(*args, **kwargs)
[ 624.383923] env[61473]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 624.383923] env[61473]: return f(*args, **kwargs)
[ 624.383923] env[61473]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 624.383923] env[61473]: return db.service_get_minimum_version(context, binaries)
[ 624.383923] env[61473]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 624.383923] env[61473]: _check_db_access()
[ 624.383923] env[61473]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 624.383923] env[61473]: stacktrace = ''.join(traceback.format_stack())
[ 624.383923] env[61473]:
[ 624.384355] env[61473]: WARNING nova.objects.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 624.384442] env[61473]: WARNING nova.objects.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Failed to get minimum service version for cell ff21aaeb-1ffc-405a-83d7-a9bc15c0a8c7
[ 624.384864] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Acquiring lock "singleton_lock" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
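The two ERROR blocks are nova's guard against direct database access from nova-compute: the blocked call logs the offending stack instead of querying, which is why startup continues with WARNINGs rather than crashing. A simplified sketch of that pattern, with illustrative names rather than nova's exact code:

    # Sketch of the guard pattern behind "No DB access allowed in
    # nova-compute": a service that must not touch the database installs
    # a check that records the offending call stack.
    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DB_ACCESS_ALLOWED = False  # False in a compute-style service

    def check_db_access():
        if not DB_ACCESS_ALLOWED:
            # Capture and log the full stack of the caller, as the log
            # entries above show nova doing via traceback.format_stack().
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)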
[ 624.385023] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Acquired lock "singleton_lock" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 624.385278] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Releasing lock "singleton_lock" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 624.385594] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Full set of CONF: {{(pid=61473) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 624.385734] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ******************************************************************************** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 624.385861] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] Configuration options gathered from: {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 624.385997] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 624.386197] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 624.386326] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ================================================================================ {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 624.386535] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] allow_resize_to_same_host = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.386704] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] arq_binding_timeout = 300 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.386836] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] backdoor_port = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.386963] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] backdoor_socket = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.387166] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] block_device_allocate_retries = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.387300] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] block_device_allocate_retries_interval = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.387466] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cert = self.pem {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.387631] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.387797] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute_monitors = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.387961] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] config_dir = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.388145] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] config_drive_format = iso9660 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.388278] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.388440] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] config_source = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.388609] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] console_host = devstack {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.388772] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] control_exchange = nova {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.388930] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cpu_allocation_ratio = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.389103] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] daemon = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.389273] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] debug = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.389429] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] default_access_ip_network_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.389595] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] default_availability_zone = nova {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.389751] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] default_ephemeral_format = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.389911] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] default_green_pool_size = 1000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.390168] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.390337] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] default_schedule_zone = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.390497] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] disk_allocation_ratio = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.390656] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] enable_new_services = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.390833] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] enabled_apis = ['osapi_compute'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.390999] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] enabled_ssl_apis = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.391172] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] flat_injected = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.391332] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] force_config_drive = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.391489] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] force_raw_images = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.391658] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] graceful_shutdown_timeout = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.391830] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] heal_instance_info_cache_interval = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.392085] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] host = cpu-1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.392270] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.392436] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] initial_disk_allocation_ratio = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.392598] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] initial_ram_allocation_ratio = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.392814] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.392979] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instance_build_timeout = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.393156] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instance_delete_interval = 300 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.393325] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instance_format = [instance: %(uuid)s] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.393489] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instance_name_template = instance-%08x {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.393649] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instance_usage_audit = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.393821] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instance_usage_audit_period = month {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.394021] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.394198] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] instances_path = /opt/stack/data/nova/instances {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.394370] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] internal_service_availability_zone = internal {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.394527] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] key = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.394687] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] live_migration_retry_count = 30 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.394865] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_config_append = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.395060] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.395229] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_dir = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.395390] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.395518] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_options = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.395679] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_rotate_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.395847] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_rotate_interval_type = days {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.396023] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] log_rotation_type = none {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.396159] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.396286] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.396457] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.396622] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.396750] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.396915] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] long_rpc_timeout = 1800 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.397085] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] max_concurrent_builds = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.397262] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] max_concurrent_live_migrations = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.397400] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] max_concurrent_snapshots = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.397556] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] max_local_block_devices = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.397712] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] max_logfile_count = 30 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.397867] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] max_logfile_size_mb = 200 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.398086] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] maximum_instance_delete_attempts = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.398204] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] metadata_listen = 0.0.0.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.398368] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] metadata_listen_port = 8775 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.398536] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] metadata_workers = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.398696] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] migrate_max_retries = -1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.398863] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] mkisofs_cmd = genisoimage {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.399081] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] my_block_storage_ip = 10.180.1.21 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.399217] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] my_ip = 10.180.1.21 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.399380] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] network_allocate_retries = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.399559] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.399727] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] osapi_compute_listen = 0.0.0.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.399888] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] osapi_compute_listen_port = 8774 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.400071] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] osapi_compute_unique_server_name_scope = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.400241] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] osapi_compute_workers = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.400402] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] password_length = 12 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.400562] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] periodic_enable = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.400722] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] periodic_fuzzy_delay = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.400888] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] pointer_model = usbtablet {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.401067] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] preallocate_images = none {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.401242] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] publish_errors = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.401376] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] pybasedir = /opt/stack/nova {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.401532] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ram_allocation_ratio = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.401692] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] rate_limit_burst = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.401860] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] rate_limit_except_level = CRITICAL {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.402030] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] rate_limit_interval = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.402194] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] reboot_timeout = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.402354] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] reclaim_instance_interval = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.402510] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] record = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.402677] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] reimage_timeout_per_gb = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.402842] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] report_interval = 120 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.403013] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] rescue_timeout = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.403177] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] reserved_host_cpus = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.403333] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] reserved_host_disk_mb = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.403489] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] reserved_host_memory_mb = 512 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.403644] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] reserved_huge_pages = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.403800] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] resize_confirm_window = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.403981] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] resize_fs_using_block_device = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.404161] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] resume_guests_state_on_host_boot = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.404331] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.404491] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] rpc_response_timeout = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.404648] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] run_external_periodic_tasks = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.404817] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] running_deleted_instance_action = reap {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.405009] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] running_deleted_instance_poll_interval = 1800 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.405182] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] running_deleted_instance_timeout = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.405342] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler_instance_sync_interval = 120 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.405508] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_down_time = 720 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.405677] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] servicegroup_driver = db {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.405835] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] shelved_offload_time = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.405995] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] shelved_poll_interval = 3600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.406175] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] shutdown_timeout = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.406335] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] source_is_ipv6 = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.406492] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ssl_only = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.406727] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.406894] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] sync_power_state_interval = 600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.407069] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] sync_power_state_pool_size = 1000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.407241] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] syslog_log_facility = LOG_USER {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.407397] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] tempdir = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.407555] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] timeout_nbd = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.407722] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] transport_url = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.407886] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] update_resources_interval = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.408056] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] use_cow_images = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 624.408219] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] use_eventlog = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
None None] use_eventlog = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.408377] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] use_journal = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.408532] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] use_json = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.408689] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] use_rootwrap_daemon = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.408845] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] use_stderr = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.409009] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] use_syslog = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.409176] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vcpu_pin_set = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.409342] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plugging_is_fatal = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.409509] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plugging_timeout = 300 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.409672] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] virt_mkfs = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.409834] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] volume_usage_poll_interval = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.409994] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] watch_log_file = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.410174] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] web = /usr/share/spice-html5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 624.410351] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_concurrency.disable_process_locking = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.410624] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.410803] 
env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.410967] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.411154] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.411324] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.411488] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.411667] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.auth_strategy = keystone {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.411834] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.compute_link_prefix = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.412024] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.412202] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.dhcp_domain = novalocal {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.412372] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.enable_instance_password = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.412538] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.glance_link_prefix = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.412702] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.412875] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.413053] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] 
api.instance_list_per_project_cells = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.413222] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.list_records_by_skipping_down_cells = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.413386] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.local_metadata_per_cell = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.413555] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.max_limit = 1000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.413725] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.metadata_cache_expiration = 15 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.413932] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.neutron_default_tenant_id = default {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.414128] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.use_neutron_default_nets = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.414312] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.414476] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.414644] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.414829] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.415044] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.vendordata_dynamic_targets = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.415223] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.vendordata_jsonfile_path = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.415406] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.415597] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc 
None None] cache.backend = dogpile.cache.memcached {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.415764] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.backend_argument = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.415934] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.config_prefix = cache.oslo {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.416120] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.dead_timeout = 60.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.416292] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.debug_cache_backend = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.416455] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.enable_retry_client = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.416617] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.enable_socket_keepalive = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.416788] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.enabled = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.416953] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.enforce_fips_mode = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.417133] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.expiration_time = 600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.417299] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.hashclient_retry_attempts = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.417466] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.hashclient_retry_delay = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.417629] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_dead_retry = 300 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.417789] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_password = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.417958] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61473) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.418137] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.418303] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_pool_maxsize = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.418464] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.418626] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_sasl_enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.418805] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.418969] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_socket_timeout = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.419146] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.memcache_username = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.419312] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.proxies = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.419473] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.redis_password = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.419643] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.redis_sentinel_service_name = mymaster {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.419817] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.419991] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.redis_server = localhost:6379 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.420171] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.redis_socket_timeout = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.420331] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.redis_username = None {{(pid=61473) log_opt_values 
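The cache.* records in this dump show oslo.cache pointed at the dogpile.cache.memcached backend with memcache_servers = ['localhost:11211'] and a 600-second expiration_time. A minimal sketch, assuming the standard oslo.cache helpers, of how such a section becomes a usable cache region:

from oslo_cache import core as cache
from oslo_config import cfg

CONF = cfg.CONF
cache.configure(CONF)                       # registers the [cache] options
CONF([], project='nova')                    # would normally read nova.conf
region = cache.create_region()              # a dogpile.cache region
cache.configure_cache_region(CONF, region)  # applies backend/servers/TTL

# region.set('key', 'value') / region.get('key') now go through the
# configured backend; with the values logged here, memcached at
# localhost:11211 with a 600 s TTL.
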
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.420494] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.retry_attempts = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.420660] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.retry_delay = 0.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.420828] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.socket_keepalive_count = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.420990] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.socket_keepalive_idle = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.421166] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.socket_keepalive_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.421327] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.tls_allowed_ciphers = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.421491] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.tls_cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.421646] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.tls_certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.421807] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.tls_enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.421962] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cache.tls_keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.422147] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.422319] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.auth_type = password {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.422479] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.422652] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.catalog_info = volumev3::publicURL {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.422811] env[61473]: DEBUG oslo_service.service 
[None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.422972] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.423151] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.cross_az_attach = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.423314] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.debug = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.423474] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.endpoint_template = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.423637] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.http_retries = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.423798] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.423982] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.424176] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.os_region_name = RegionOne {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.424363] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.424535] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cinder.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.424710] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.424898] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.cpu_dedicated_set = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.425085] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.cpu_shared_set = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.425259] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.image_type_exclude_list = [] {{(pid=61473) 
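The cinder.catalog_info value logged above, volumev3::publicURL, packs three fields into one colon-separated string: service type, optional service name, and endpoint interface, where an empty middle field matches any service name. A small illustrative parser (the helper name is hypothetical, not Nova's implementation):

def parse_catalog_info(catalog_info):
    service_type, service_name, endpoint_type = catalog_info.split(':')
    return {
        'service_type': service_type,          # 'volumev3'
        'service_name': service_name or None,  # '' -> match any name
        'interface': endpoint_type,            # 'publicURL'
    }

print(parse_catalog_info('volumev3::publicURL'))
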
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.425424] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.425588] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.max_concurrent_disk_ops = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.425753] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.max_disk_devices_to_attach = -1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.425915] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.426099] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.426268] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.resource_provider_association_refresh = 300 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.426430] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.426595] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.shutdown_retry_interval = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.426773] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.426953] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] conductor.workers = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.427142] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] console.allowed_origins = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.427307] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] console.ssl_ciphers = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.427479] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] console.ssl_minimum_version = default {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.427649] env[61473]: DEBUG oslo_service.service [None 
req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] consoleauth.enforce_session_timeout = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.427813] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] consoleauth.token_ttl = 600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.427979] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.428152] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.428318] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.428477] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.connect_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.428635] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.connect_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.428796] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.endpoint_override = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.428956] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.429126] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.429287] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.max_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.429445] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.min_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.429601] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.region_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.429755] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.retriable_status_codes = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.429909] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.service_name = None {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.430111] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.service_type = accelerator {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.430254] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.430412] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.status_code_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.430566] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.status_code_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.430721] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.430899] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.431070] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] cyborg.version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.431253] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.backend = sqlalchemy {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.431429] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.connection = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.431593] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.connection_debug = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.431761] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.connection_parameters = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.431923] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.connection_recycle_time = 3600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.432100] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.connection_trace = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.432264] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.db_inc_retry_interval = True {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.432428] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.db_max_retries = 20 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.432590] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.db_max_retry_interval = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.432751] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.db_retry_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.432914] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.max_overflow = 50 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.433086] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.max_pool_size = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.433254] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.max_retries = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.433424] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.433581] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.mysql_wsrep_sync_wait = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.433738] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.pool_timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.433925] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.retry_interval = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.434110] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.slave_connection = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.434297] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.sqlite_synchronous = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.434482] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] database.use_db_reconnect = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.434664] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.backend = sqlalchemy {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
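The database.* records above map onto SQLAlchemy connection-pool settings. A minimal sketch in plain SQLAlchemy of what the logged values correspond to; Nova itself applies them through oslo.db's enginefacade rather than calling create_engine directly, and the URL below is a placeholder for the masked database.connection:

from sqlalchemy import create_engine

engine = create_engine(
    'mysql+pymysql://nova:****@dbhost/nova',  # database.connection (masked)
    pool_recycle=3600,  # database.connection_recycle_time = 3600
    pool_size=5,        # database.max_pool_size = 5
    max_overflow=50,    # database.max_overflow = 50
)
# database.pool_timeout = None leaves the pool's checkout timeout at the
# backend default, so it is simply not passed here.
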
624.434848] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.connection = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.435051] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.connection_debug = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.435233] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.connection_parameters = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.435399] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.connection_recycle_time = 3600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.435562] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.connection_trace = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.435725] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.db_inc_retry_interval = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.435890] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.db_max_retries = 20 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.436065] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.db_max_retry_interval = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.436232] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.db_retry_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.436395] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.max_overflow = 50 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.436955] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.max_pool_size = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.436955] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.max_retries = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.436955] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.437123] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.437187] 
env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.pool_timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.437352] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.retry_interval = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.437510] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.slave_connection = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.437671] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] api_database.sqlite_synchronous = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.437844] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] devices.enabled_mdev_types = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.438030] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.438216] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ephemeral_storage_encryption.default_format = luks {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.438383] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ephemeral_storage_encryption.enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.438548] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.438719] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.api_servers = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.438883] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.439059] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.439228] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.439391] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.connect_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.439548] env[61473]: DEBUG 
oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.connect_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.439709] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.debug = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.439873] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.default_trusted_certificate_ids = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.440044] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.enable_certificate_validation = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.440213] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.enable_rbd_download = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.440376] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.endpoint_override = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.440541] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.440704] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.440863] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.max_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.441031] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.min_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.441202] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.num_retries = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.441373] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.rbd_ceph_conf = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.441537] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.rbd_connect_timeout = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.441707] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.rbd_pool = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.441873] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.rbd_user = {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.442040] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.region_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.442202] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.retriable_status_codes = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.442360] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.service_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.442527] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.service_type = image {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.442693] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.442854] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.status_code_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.443029] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.status_code_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.443194] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.443377] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.443542] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.verify_glance_signatures = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.443701] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] glance.version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.443888] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] guestfs.debug = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.444085] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] mks.enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.444444] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.444638] 
env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] image_cache.manager_interval = 2400 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.444815] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] image_cache.precache_concurrency = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.445018] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] image_cache.remove_unused_base_images = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.445201] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.445376] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.445557] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] image_cache.subdirectory_name = _base {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.445736] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.api_max_retries = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.445905] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.api_retry_interval = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.446081] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.446287] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.auth_type = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.446466] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.446629] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.446797] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.446964] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.conductor_group = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.447141] env[61473]: DEBUG 
oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.connect_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.447302] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.connect_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.447460] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.endpoint_override = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.447623] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.447781] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.447940] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.max_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.448113] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.min_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.448283] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.peer_list = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.448444] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.region_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.448604] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.retriable_status_codes = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.448768] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.serial_console_state_timeout = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.448929] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.service_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.449112] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.service_type = baremetal {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.449276] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.shard = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.449443] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.split_loggers = False {{(pid=61473) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.449603] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.status_code_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.449763] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.status_code_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.449922] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.450113] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.450274] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ironic.version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.450458] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.450632] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] key_manager.fixed_key = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.450815] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.450979] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.barbican_api_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.451157] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.barbican_endpoint = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.451334] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.barbican_endpoint_type = public {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.451493] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.barbican_region_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.451650] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.451810] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.certfile = None {{(pid=61473) log_opt_values 
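The key_manager.* records just above select Nova's ConfKeyManager backend with a masked fixed_key, meaning one static key serves every request; the barbican.* and vault.* groups that follow configure the alternative castellan backends. A minimal, hedged sketch of how a service obtains whichever backend is configured, assuming castellan's public API:

from castellan import key_manager
from oslo_config import cfg

CONF = cfg.CONF
# Instantiates the class named by [key_manager] backend; with
# ConfKeyManager, key lookups are all backed by the (masked) fixed_key.
km = key_manager.API(CONF)
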
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.451976] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.452152] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.452311] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.452478] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.number_of_retries = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.452639] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.retry_delay = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.452799] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.send_service_user_token = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.452957] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.453129] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.453293] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.verify_ssl = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.453452] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican.verify_ssl_path = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.453616] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.453780] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.auth_type = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.453967] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.454146] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
624.454313] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.454475] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.454633] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.454797] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.454980] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] barbican_service_user.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.455168] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.approle_role_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.455331] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.approle_secret_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.455487] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.455642] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.455802] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.455962] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.456136] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.456341] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.kv_mountpoint = secret {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.456507] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.kv_path = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.456674] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None 
None] vault.kv_version = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.456833] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.namespace = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.456999] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.root_token_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.457177] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.457335] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.ssl_ca_crt_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.457492] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.457654] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.use_ssl = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.457853] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.457987] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.458161] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.auth_type = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.458351] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.458520] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.458686] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.458842] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.connect_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.459008] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.connect_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
624.459173] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.endpoint_override = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.459341] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.459501] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.459644] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.max_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.459791] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.min_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.459950] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.region_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.460119] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.retriable_status_codes = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.460279] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.service_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.460449] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.service_type = identity {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.460611] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.460770] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.status_code_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.460927] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.status_code_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.461111] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.461358] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.461535] env[61473]: DEBUG oslo_service.service [None 
req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] keystone.version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.461743] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.connection_uri = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.461910] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.cpu_mode = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.462095] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.cpu_model_extra_flags = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.462274] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.cpu_models = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.462450] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.cpu_power_governor_high = performance {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.462620] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.cpu_power_governor_low = powersave {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.462787] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.cpu_power_management = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.462961] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.463146] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.device_detach_attempts = 8 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.463313] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.device_detach_timeout = 20 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.463482] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.disk_cachemodes = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.463644] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.disk_prefix = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.463809] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.enabled_perf_events = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.464015] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] 
libvirt.file_backed_memory = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.464193] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.gid_maps = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.464356] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.hw_disk_discard = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.464540] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.hw_machine_type = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.464732] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.images_rbd_ceph_conf = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.464924] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.465115] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.465294] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.images_rbd_glance_store_name = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.465467] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.images_rbd_pool = rbd {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.465640] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.images_type = default {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.465802] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.images_volume_group = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.465969] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.inject_key = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.466155] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.inject_partition = -2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.466323] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.inject_password = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.466489] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.iscsi_iface = None {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.466654] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.iser_use_multipath = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.466822] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_bandwidth = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.466987] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.467170] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_downtime = 500 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.467339] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.467504] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.467665] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_inbound_addr = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.467828] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.467995] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_permit_post_copy = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.468176] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_scheme = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.468355] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_timeout_action = abort {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.468523] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_tunnelled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.468687] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.live_migration_uri = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.468853] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] 
libvirt.live_migration_with_native_tls = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.469026] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.max_queues = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.469197] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.469428] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.469592] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.nfs_mount_options = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.469883] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.470073] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.470251] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.num_iser_scan_tries = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.470449] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.num_memory_encrypted_guests = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.470622] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.470790] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.num_pcie_ports = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.470963] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.num_volume_scan_tries = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.471148] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.pmem_namespaces = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.471314] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.quobyte_client_cfg = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.471600] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.471776] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rbd_connect_timeout = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.471944] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.472123] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.472289] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rbd_secret_uuid = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.472451] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rbd_user = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.472617] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.472793] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.remote_filesystem_transport = ssh {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.472958] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rescue_image_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.473135] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rescue_kernel_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.473297] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rescue_ramdisk_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.473468] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.473631] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.rx_queue_size = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.473805] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.smbfs_mount_options = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.474125] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.474309] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.snapshot_compression = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.474475] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.snapshot_image_format = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.474699] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.474889] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.sparse_logical_volumes = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.475083] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.swtpm_enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.475266] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.swtpm_group = tss {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.475441] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.swtpm_user = tss {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.475614] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.sysinfo_serial = unique {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.475777] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.tb_cache_size = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.475940] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.tx_queue_size = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.476122] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.uid_maps = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.476323] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.use_virtio_for_bridges = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.476505] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.virt_type = kvm {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.476679] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.volume_clear = zero 
{{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.476845] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.volume_clear_size = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.477027] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.volume_use_multipath = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.477198] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.vzstorage_cache_path = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.477372] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.477543] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.vzstorage_mount_group = qemu {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.477712] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.vzstorage_mount_opts = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.477882] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.478176] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.478357] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.vzstorage_mount_user = stack {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.478525] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.478700] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.478877] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.auth_type = password {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.479053] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.479222] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.certfile = None 
{{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.479388] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.479549] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.connect_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.479709] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.connect_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.479881] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.default_floating_pool = public {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.480056] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.endpoint_override = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.480223] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.extension_sync_interval = 600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.480389] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.http_retries = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.480550] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.480712] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.480873] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.max_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.481061] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.481230] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.min_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.481405] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.ovs_bridge = br-int {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.481572] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.physnets = [] {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.481747] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.region_name = RegionOne {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.481909] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.retriable_status_codes = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.482093] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.service_metadata_proxy = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.482257] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.service_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.482461] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.service_type = network {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.482636] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.482807] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.status_code_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.482981] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.status_code_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.483159] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.483344] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.483507] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] neutron.version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.483680] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] notifications.bdms_in_notifications = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.483888] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] notifications.default_level = INFO {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.484088] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] notifications.notification_format = unversioned {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.484262] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] notifications.notify_on_state_change = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.484441] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.484620] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] pci.alias = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.484791] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] pci.device_spec = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.485014] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] pci.report_in_placement = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.485210] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.485388] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.auth_type = password {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.485560] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.485725] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.485892] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.486067] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.486258] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.connect_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.486429] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.connect_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.486590] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.default_domain_id = None {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.486749] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.default_domain_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.486908] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.domain_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.487080] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.domain_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.487245] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.endpoint_override = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.487410] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.487570] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.487731] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.max_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.487890] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.min_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.488073] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.password = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.488240] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.project_domain_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.488407] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.project_domain_name = Default {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.488574] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.project_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.488749] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.project_name = service {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.488920] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.region_name = RegionOne {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.489096] 
env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.retriable_status_codes = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.489261] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.service_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.489432] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.service_type = placement {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.489597] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.489763] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.status_code_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.489926] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.status_code_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.490101] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.system_scope = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.490268] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.490430] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.trust_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.490593] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.user_domain_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.490764] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.user_domain_name = Default {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.490928] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.user_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.491118] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.username = placement {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.491304] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.491467] env[61473]: DEBUG oslo_service.service [None 
req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] placement.version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.491646] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.cores = 20 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.491813] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.count_usage_from_placement = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.491984] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.492175] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.injected_file_content_bytes = 10240 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.492344] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.injected_file_path_length = 255 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.492510] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.injected_files = 5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.492678] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.instances = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.492845] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.key_pairs = 100 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.493022] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.metadata_items = 128 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.493193] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.ram = 51200 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.493361] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.recheck_quota = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.493532] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.server_group_members = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.493700] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] quota.server_groups = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.493908] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61473) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.494099] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.494272] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.image_metadata_prefilter = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.494468] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.494649] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.max_attempts = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.494822] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.max_placement_results = 1000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.495024] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.495200] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.query_placement_for_image_type_support = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.495367] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.495548] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] scheduler.workers = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.495730] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.495906] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.496098] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.496274] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.496442] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.496609] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.496776] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.496966] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.497152] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.host_subset_size = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.497319] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.497481] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.497643] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.497808] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.isolated_hosts = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.497972] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.isolated_images = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.498150] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.498318] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.498485] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.498649] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.pci_in_placement = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.498810] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.498973] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.499152] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.499312] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.499475] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.499636] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.499798] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.track_instance_changes = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.499971] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.500159] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] metrics.required = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.500327] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] metrics.weight_multiplier = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
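The records above and below are oslo.config's standard startup dump: every one points at the same call site, log_opt_values (the cfg.py:2620 reference in each record), which iterates over all registered option groups once the service has parsed its configuration and emits one DEBUG record per effective value. A minimal, self-contained sketch of that mechanism follows; the option names simply mirror two scheduler entries from the dump, and the scaffolding is illustrative, not Nova's actual startup code.

    # Minimal sketch of the dump mechanism above, assuming only oslo.config.
    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.CONF
    CONF.register_opts(
        [cfg.IntOpt('max_attempts', default=3),
         cfg.IntOpt('workers', default=2)],
        group='scheduler',
    )

    CONF([], project='demo')                 # parse (empty) CLI and config sources
    CONF.log_opt_values(LOG, logging.DEBUG)  # one DEBUG record per option

Options declared with secret=True are masked by the same routine, which is why vmware.host_password and profiler.hmac_keys appear below as ****.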
[ 624.500488] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.500650] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] metrics.weight_setting = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.500979] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.501175] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] serial_console.enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.501358] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] serial_console.port_range = 10000:20000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.501533] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.501708] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.501876] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] serial_console.serialproxy_port = 6083 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.502057] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.502237] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.auth_type = password {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.502402] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.502562] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.502727] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.502891] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.503063] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.503241] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.send_service_user_token = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.503406] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.503566] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] service_user.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.503751] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.agent_enabled = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.503942] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.504291] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.504488] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.504658] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.html5proxy_port = 6082 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.504827] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.image_compression = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.505027] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.jpeg_compression = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.505197] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.playback_compression = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.505369] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.server_listen = 127.0.0.1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.505538] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.505697] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.streaming_mode = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.505923] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] spice.zlib_compression = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.506123] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] upgrade_levels.baseapi = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.506302] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] upgrade_levels.compute = auto {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.506461] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] upgrade_levels.conductor = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.506619] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] upgrade_levels.scheduler = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.506784] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.506945] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.auth_type = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.507120] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.507280] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.507443] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.507603] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.507762] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.507922] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.508092] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vendordata_dynamic_auth.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.508269] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.api_retry_count = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.508466] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.ca_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.508669] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.cache_prefix = devstack-image-cache {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.508847] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.cluster_name = testcl1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.509027] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.connection_pool_size = 10 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.509195] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.console_delay_seconds = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.509366] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.datastore_regex = ^datastore.* {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.509572] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.509748] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.host_password = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.509915] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.host_port = 443 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.510099] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.host_username = administrator@vsphere.local {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.510272] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.insecure = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.510437] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.integration_bridge = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.510604] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.maximum_objects = 100 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.510763] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.pbm_default_policy = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.510927] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.pbm_enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.511104] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.pbm_wsdl_location = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.511281] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.511444] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.serial_port_proxy_uri = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.511604] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.serial_port_service_uri = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.511775] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.task_poll_interval = 0.5 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.511948] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.use_linked_clone = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.512135] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.vnc_keymap = en-us {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.512305] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.vnc_port = 5900 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.512472] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vmware.vnc_port_total = 10000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.512659] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.auth_schemes = ['none'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.512837] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.513169] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
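The vmware.* records above are the effective [vmware] group consumed by the VMwareVCDriver loaded earlier: vCenter vc1.osci.c.eu-de-1.cloud.sap:443 with insecure = True (no TLS verification, consistent with ca_file = None), cluster testcl1, and datastores matched by ^datastore.*. Below is a hedged sketch of how such a grouped section is declared and read back with oslo.config; the option subset and the temp-file handling are illustrative, not Nova's code.

    # Sketch: declaring and reading a [vmware]-style group with oslo.config.
    # Option names and values mirror the dump above; the rest is assumed.
    import tempfile

    from oslo_config import cfg

    vmware_opts = [
        cfg.StrOpt('host_ip'),
        cfg.PortOpt('host_port', default=443),
        cfg.BoolOpt('insecure', default=False),
        cfg.StrOpt('cluster_name'),
        cfg.StrOpt('datastore_regex'),
    ]

    conf = cfg.ConfigOpts()
    conf.register_opts(vmware_opts, group='vmware')

    with tempfile.NamedTemporaryFile('w', suffix='.conf', delete=False) as f:
        f.write(
            "[vmware]\n"
            "host_ip = vc1.osci.c.eu-de-1.cloud.sap\n"
            "host_port = 443\n"
            "insecure = True\n"
            "cluster_name = testcl1\n"
            "datastore_regex = ^datastore.*\n"
        )

    conf(['--config-file', f.name])
    print(conf.vmware.host_ip, conf.vmware.cluster_name)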
[ 624.513362] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.513540] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.novncproxy_port = 6080 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.513719] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.server_listen = 127.0.0.1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.513917] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.514113] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.vencrypt_ca_certs = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.514281] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.vencrypt_client_cert = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.514442] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vnc.vencrypt_client_key = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.514624] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.514792] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.disable_deep_image_inspection = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.514987] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.515173] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.515340] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.515504] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.disable_rootwrap = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.515668] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.enable_numa_live_migration = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.515831] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.515996] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.516192] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.516387] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.libvirt_disable_apic = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.516555] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.516721] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.516885] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.517059] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.517227] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.517389] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.517551] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.517713] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.517875] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.518049] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.518239] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.518441] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.client_socket_timeout = 900 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.518581] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.default_pool_size = 1000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.518749] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.keep_alive = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.518921] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.max_header_line = 16384 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.519103] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.secure_proxy_ssl_header = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.519273] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.ssl_ca_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.519440] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.ssl_cert_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.519604] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.ssl_key_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.519772] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.tcp_keepidle = 600 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.519950] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.520144] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] zvm.ca_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.520311] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] zvm.cloud_connector_url = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.520603] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.520781] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] zvm.reachable_timeout = 300 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.520967] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.enforce_new_defaults = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.521157] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.enforce_scope = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.521337] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.policy_default_rule = default {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.521522] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.521701] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.policy_file = policy.yaml {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.521875] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.522051] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.522217] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.522377] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.522541] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.522710] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
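Among the oslo_policy.* values above, enforce_new_defaults = True and enforce_scope = True mean the service applies the new secure-RBAC policy defaults and checks token scope, with rules loaded from policy.yaml plus any overrides under policy.d. A small sketch of the consuming side with oslo.policy follows; the rule name, check string, and credentials are hypothetical.

    # Sketch of the oslo.policy consumption implied by the values above.
    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([])                            # no CLI args; library defaults apply
    enforcer = policy.Enforcer(conf)    # honors policy_file / policy_dirs

    enforcer.register_default(
        policy.RuleDefault('compute:servers:show', 'role:reader'))

    creds = {'roles': ['reader'], 'project_id': 'p1'}
    target = {'project_id': 'p1'}
    print(enforcer.enforce('compute:servers:show', target, creds))  # True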
[ 624.522886] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.523077] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.connection_string = messaging:// {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.523250] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.enabled = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.523423] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.es_doc_type = notification {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.523587] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.es_scroll_size = 10000 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.523759] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.es_scroll_time = 2m {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.523946] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.filter_error_trace = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.524143] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.hmac_keys = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.524316] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.sentinel_service_name = mymaster {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.524485] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.socket_timeout = 0.1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.524649] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.trace_requests = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.524813] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler.trace_sqlalchemy = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.525054] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler_jaeger.process_tags = {} {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.525237] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler_jaeger.service_name_prefix = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.525406] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] profiler_otlp.service_name_prefix = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.525575] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] remote_debug.host = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.525737] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] remote_debug.port = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.525917] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.526097] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.526299] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.526475] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.526640] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.526801] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.526971] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.527146] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.527310] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.527483] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.527642] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.527809] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.527975] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.528159] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.528362] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.528541] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.528705] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.528881] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.529059] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.529229] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.529445] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.529615] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.529778] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.529949] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.530127] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.530291] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.530454] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.530616] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.530781] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.530948] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.ssl = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.531136] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.531311] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.531474] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.531645] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.531816] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.ssl_version = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.531980] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.532187] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.532359] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_notifications.retry = -1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.532545] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.532721] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_messaging_notifications.transport_url = **** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.532896] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.auth_section = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.533076] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.auth_type = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.533244] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.cafile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.533404] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.certfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.533565] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.collect_timing = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.533725] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.connect_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.533912] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.connect_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.534096] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.endpoint_id = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.534263] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.endpoint_override = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.534429] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.insecure = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.534590] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.keyfile = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.534747] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.max_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.534929] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.min_version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.535107] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.region_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.535273] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.retriable_status_codes = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.535432] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.service_name = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.535592] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.service_type = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.535753] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.split_loggers = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.535913] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.status_code_retries = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.536084] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.status_code_retry_delay = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.536247] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.timeout = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.536406] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.valid_interfaces = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.536565] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_limit.version = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.536732] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_reports.file_event_handler = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.536899] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.537115] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] oslo_reports.log_dir = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.537298] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.537465] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.537629] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.537798] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.537966] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.538144] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.538320] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.538482] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_ovs_privileged.group = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.538639] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.538805] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.538971] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.539145] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] vif_plug_ovs_privileged.user = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 624.539317] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.flat_interface = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
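The vif_plug_linux_bridge_privileged and vif_plug_ovs_privileged groups above configure os-vif's oslo.privsep contexts; capabilities = [12] and [12, 1] are raw Linux capability numbers (12 = CAP_NET_ADMIN, 1 = CAP_DAC_OVERRIDE) that the privsep daemon retains after dropping the rest. A sketch modeled on os-vif's setup follows; the entrypoint body is hypothetical.

    # Sketch of a privsep context tied to the config group dumped above.
    from oslo_privsep import capabilities as caps
    from oslo_privsep import priv_context

    vif_plug = priv_context.PrivContext(
        "vif_plug_ovs",
        cfg_section="vif_plug_ovs_privileged",  # the group logged above
        pypath=__name__ + ".vif_plug",
        capabilities=[caps.CAP_NET_ADMIN, caps.CAP_DAC_OVERRIDE],
    )

    @vif_plug.entrypoint
    def set_mtu(device, mtu):
        """Would run inside the privsep daemon with the capabilities above."""

Calls decorated this way are proxied to a separate privileged process, which is why the group also carries daemon tuning such as thread_pool_size and logger_name.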
[None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.flat_interface = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.539498] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.539674] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.539850] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.540033] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.540207] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.540377] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.540543] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.540722] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.540896] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_ovs.isolate_vif = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.541079] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.541252] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.541461] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.541647] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_ovs.ovsdb_interface = native {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 624.541816] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_vif_ovs.per_port_bridge = False {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.541984] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_brick.lock_path = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.542171] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.542338] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.542511] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] privsep_osbrick.capabilities = [21] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.542674] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] privsep_osbrick.group = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.542836] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] privsep_osbrick.helper_command = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.543008] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.543184] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.543345] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] privsep_osbrick.user = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.543518] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.543681] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] nova_sys_admin.group = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.543872] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] nova_sys_admin.helper_command = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.544052] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
624.544227] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.544390] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] nova_sys_admin.user = None {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 624.544521] env[61473]: DEBUG oslo_service.service [None req-ed3fe7e4-b55b-4c7d-ade9-b6307ea86abc None None] ******************************************************************************** {{(pid=61473) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 624.544982] env[61473]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 624.554764] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Getting list of instances from cluster (obj){ [ 624.554764] env[61473]: value = "domain-c8" [ 624.554764] env[61473]: _type = "ClusterComputeResource" [ 624.554764] env[61473]: } {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 624.556040] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eae09b5-8d32-4b72-828c-daec5a0948e1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.566173] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Got total of 0 instances {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 624.566749] env[61473]: WARNING nova.virt.vmwareapi.driver [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 624.567301] env[61473]: INFO nova.virt.node [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Generated node identity 89e0ead3-8356-4b9c-95ce-a1a119b67576 [ 624.567618] env[61473]: INFO nova.virt.node [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Wrote node identity 89e0ead3-8356-4b9c-95ce-a1a119b67576 to /opt/stack/data/n-cpu-1/compute_id [ 624.580107] env[61473]: WARNING nova.compute.manager [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Compute nodes ['89e0ead3-8356-4b9c-95ce-a1a119b67576'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 624.614754] env[61473]: INFO nova.compute.manager [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 624.638894] env[61473]: WARNING nova.compute.manager [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
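
The "Acquiring lock" / "acquired" / "released" triplets that follow (and that recur throughout this log around "compute_resources" and the instance UUID locks) are emitted by oslo.concurrency's lockutils helpers rather than by Nova itself; every guarded critical section produces the same three DEBUG lines with the wait and hold timings shown. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed and DEBUG logging is enabled — update_tracker is an illustrative name, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped function runs with the named
    # semaphore held, and lockutils itself logs the acquire/wait/
    # hold timings seen in the surrounding DEBUG entries.
    @lockutils.synchronized('compute_resources')
    def update_tracker():
        pass

    # Equivalent context-manager form of the same guard:
    with lockutils.lock('compute_resources'):
        pass
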
[ 624.639163] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.639381] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.639534] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.639683] env[61473]: DEBUG nova.compute.resource_tracker [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 624.640789] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e6431b-a5db-490b-a946-55660488d78f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.648525] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1627a0-022f-4822-8795-fe2de219bf2c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.662655] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52690697-aafb-437b-80ea-475fdfa89b0f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.668629] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24c4b0f-815f-4d33-a2c1-f5b634e4caf2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.698504] env[61473]: DEBUG nova.compute.resource_tracker [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180643MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 624.698627] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.698794] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.710669] env[61473]: WARNING 
nova.compute.resource_tracker [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] No compute node record for cpu-1:89e0ead3-8356-4b9c-95ce-a1a119b67576: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 89e0ead3-8356-4b9c-95ce-a1a119b67576 could not be found. [ 624.724038] env[61473]: INFO nova.compute.resource_tracker [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 89e0ead3-8356-4b9c-95ce-a1a119b67576 [ 624.792686] env[61473]: DEBUG nova.compute.resource_tracker [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 624.792856] env[61473]: DEBUG nova.compute.resource_tracker [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=183GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 624.910393] env[61473]: INFO nova.scheduler.client.report [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] [req-05438dce-0bdb-4930-9b36-25b252f5bc64] Created resource provider record via placement API for resource provider with UUID 89e0ead3-8356-4b9c-95ce-a1a119b67576 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 624.927313] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192d93bd-3c64-4ccd-ad1a-351c09d09294 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.935079] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691757d2-5c36-4b84-9e9a-c7bb6ddfe42a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.964236] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ae6745-da50-4a39-b5e8-1584a35c1930 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.971168] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b80319-e380-4616-af3a-d03258ebca28 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.984756] env[61473]: DEBUG nova.compute.provider_tree [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 625.026088] env[61473]: DEBUG nova.scheduler.client.report [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Updated inventory for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 625.026345] env[61473]: DEBUG nova.compute.provider_tree [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Updating resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 generation from 0 to 1 during operation: update_inventory {{(pid=61473) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 625.026490] env[61473]: DEBUG nova.compute.provider_tree [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 625.077189] env[61473]: DEBUG nova.compute.provider_tree [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Updating resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 generation from 1 to 2 during operation: update_traits {{(pid=61473) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 625.095379] env[61473]: DEBUG nova.compute.resource_tracker [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 625.095584] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.397s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.095748] env[61473]: DEBUG nova.service [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Creating RPC server for service compute {{(pid=61473) start /opt/stack/nova/nova/service.py:182}} [ 625.108256] env[61473]: DEBUG nova.service [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] Join ServiceGroup membership for this service compute {{(pid=61473) start /opt/stack/nova/nova/service.py:199}} [ 625.108439] env[61473]: DEBUG nova.servicegroup.drivers.db [None req-7df50fde-9c52-4d34-b0dc-bbaacdf1e1c9 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61473) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 634.382065] env[61473]: DEBUG dbcounter [-] [61473] Writing DB stats nova_cell0:SELECT=1 {{(pid=61473) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 634.383800] env[61473]: DEBUG dbcounter [-] [61473] Writing DB stats nova_cell1:SELECT=1 {{(pid=61473) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 669.735135] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c 
tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquiring lock "4703897a-ce8c-4bca-89e9-62fe53c5d404" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.735135] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Lock "4703897a-ce8c-4bca-89e9-62fe53c5d404" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.763389] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 669.922611] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.925477] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.928799] env[61473]: INFO nova.compute.claims [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.088527] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6593ca-f51e-4d7d-8759-ca2ba57a1f14 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.099036] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6422dca-60cc-4245-b663-40785b40ab32 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.139683] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2e1158-fc83-4a31-960c-5fb6743a935f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.147328] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb90f96-d46c-4359-a0d1-6f91b666ffaa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.164884] 
env[61473]: DEBUG nova.compute.provider_tree [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.179733] env[61473]: DEBUG nova.scheduler.client.report [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.206505] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.207094] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 670.267178] env[61473]: DEBUG nova.compute.utils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.268987] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 670.269246] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 670.296346] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Start building block device mappings for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 670.427907] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquiring lock "5a1247fd-0053-444a-bb93-2ff419d9e102" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.427907] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Lock "5a1247fd-0053-444a-bb93-2ff419d9e102" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.468405] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 670.476099] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 670.561458] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.561714] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.563247] env[61473]: INFO nova.compute.claims [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.665946] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 670.666214] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 670.666374] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.666579] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 670.670146] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.670297] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 670.670536] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 670.670712] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 670.671162] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 670.671797] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 670.671797] env[61473]: DEBUG nova.virt.hardware [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 670.672819] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319c3e0f-7e1f-4046-bfc4-957dd6d10153 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.687069] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af15f0ea-0313-4e28-ac76-483d9f33e718 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.711650] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741a0d10-b9ad-4b3d-9561-d3dae371a0ef {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.776881] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac53c3ef-e89e-4033-8137-2baf52922102 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.785314] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb517b54-78d1-4e03-bb6c-8f3cf071ccc0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.829534] env[61473]: DEBUG nova.policy [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c140a7b4b4ee4377b7feffca65b51487', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab22d0685dc14f2088749991e0351228', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 670.833279] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873baf3d-c159-4c1d-8474-6290eb14a153 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.838795] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquiring lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.839039] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 
tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.845753] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50be01c-7a78-4bee-affc-a7d23074e438 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.864275] env[61473]: DEBUG nova.compute.provider_tree [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.866539] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 670.881017] env[61473]: DEBUG nova.scheduler.client.report [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.900450] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.338s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.900450] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 670.952333] env[61473]: DEBUG nova.compute.utils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.954997] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Allocating IP information in the background. 
{{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 670.955239] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 670.968025] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.968025] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.968512] env[61473]: INFO nova.compute.claims [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.974717] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 671.045948] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 671.084048] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 671.084048] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 671.084048] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.084240] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 671.084240] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.084341] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 671.085954] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 671.086157] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 671.086579] env[61473]: DEBUG nova.virt.hardware [None 
req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 671.086579] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 671.086722] env[61473]: DEBUG nova.virt.hardware [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 671.088298] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec19b58-599a-4008-b72d-33a2073defe7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.100892] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8800e7a3-8298-4afd-b968-730cc7a8932e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.117331] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5254ea-2a30-4805-a6e6-e8b92230458f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.125364] env[61473]: DEBUG nova.policy [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b31d23009204023b5f463430132a591', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '414d6bce704b4427bfbad8fdb7a414d8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 671.127507] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962d8d04-e020-4981-ac65-8037b01f4517 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.158171] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cca907-c536-48b2-b1bc-3408e7f87302 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.165129] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfcfa0c-e7a9-4911-af30-754dc6ecbfd9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.179342] env[61473]: DEBUG nova.compute.provider_tree [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Inventory has 
not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.195824] env[61473]: DEBUG nova.scheduler.client.report [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 671.225545] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.226058] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 671.278165] env[61473]: DEBUG nova.compute.utils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.280650] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 671.280650] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 671.289307] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 671.405440] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 671.442252] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 671.442515] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 671.442672] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.442852] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 671.442998] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.444312] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 671.444814] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 671.444814] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 671.444907] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 671.445129] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 671.445209] env[61473]: DEBUG nova.virt.hardware [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 671.446377] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e9b5bf-f21d-4810-8fae-ecc583dd1505 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.457505] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7510c4fb-fc35-4b10-8ad2-5f512acd6aab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.536980] env[61473]: DEBUG nova.policy [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '896a0b4143414c70bb11d46694aafa8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b322bc8e088f4bd59b862253fad1aadf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 671.648302] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.648554] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.662680] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657
tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 671.722895] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.722895] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.722895] env[61473]: INFO nova.compute.claims [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 671.869383] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01f2ca5-74b1-42bd-953d-54b4d240fa80 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.876816] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b78adf1-c1c5-4ed9-98a3-587c722f6db8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.915730] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bc45f3-39a3-490d-a43b-db8ba8c95c55 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.924333] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b155c75-f310-4a85-964f-87e99cd280d0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.944054] env[61473]: DEBUG nova.compute.provider_tree [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.958251] env[61473]: DEBUG nova.scheduler.client.report [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 671.979977] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.980544] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 672.020301] env[61473]: DEBUG nova.compute.utils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 672.021607] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 672.021774] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 672.036417] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 672.127173] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 672.155953] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 672.156214] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 672.156374] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.156553] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 672.156695] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.156839] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 672.157626] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 672.157856] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 672.158060] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 672.158806] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 672.158806] env[61473]: DEBUG nova.virt.hardware [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.162458] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bce134-036e-4cb0-84fb-5bd04060f666 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.170166] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159b2800-2e6d-4c7d-8a6d-477e53c546f7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.262224] env[61473]: DEBUG nova.policy [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c154754906f54a1694754c4b54fa3a9a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5783e3416f14b31a8061891e753e939', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 672.799942] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Successfully created port: 520570e4-302c-42f9-91d0-5b3e506d354c {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 672.806698] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Successfully created port: a6508f95-2e4d-44d5-a82e-b4b7a3b4afae {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 673.135388] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Successfully created port: 91ee22c0-0b61-4519-b8b3-88982857d750 {{(pid=61473) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 673.359662] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.359662] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.381411] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 673.494236] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.494236] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.494236] env[61473]: INFO nova.compute.claims [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.709696] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa45c014-3c25-47b7-8f4d-bf19684aac07 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.717928] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede38101-c40f-4252-8292-37c29ba11f76 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.756385] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78b1351-ec02-4ee8-be59-98b7bea82769 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.764352] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04dedc2-2a57-47f0-bdbf-14de90207068 {{(pid=61473) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.782691] env[61473]: DEBUG nova.compute.provider_tree [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.796436] env[61473]: DEBUG nova.scheduler.client.report [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 673.819531] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.820981] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 673.875854] env[61473]: DEBUG nova.compute.utils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 673.877595] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 673.877767] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 673.889152] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Start building block device mappings for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 673.964418] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 673.986131] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 673.986347] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 673.986750] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 673.986750] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 673.986860] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 673.986976] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 673.987299] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 673.987526] 
env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 673.987752] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 673.987968] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 673.988251] env[61473]: DEBUG nova.virt.hardware [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 673.991929] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8059c7-4d45-4e0b-97cb-4713ffc401c0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.997165] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9a8769-c2be-4db9-b938-761ef5535ad7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.183333] env[61473]: DEBUG nova.policy [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa72838d6ec74c2ebac9d403f5ac1cf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b5fd5d032e047b8b77b2b727a03f01c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 674.893726] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "5d67907c-7199-4734-a5cc-4466703eaa51" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.893726] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "5d67907c-7199-4734-a5cc-4466703eaa51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.909287]
env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 675.004406] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.004662] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.010268] env[61473]: INFO nova.compute.claims [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.147841] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Successfully created port: 39689a89-cd1a-49c1-9960-b462a5c81961 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 675.219393] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d5f34d-ac20-4cd6-9901-924318e74a24 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.228240] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c44eebf-0bfe-490b-aa42-83837de2887a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.263150] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a2054a-6f57-47e8-88eb-0691bc066096 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.270505] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185c6745-6eb0-490d-ae1f-f873e3e69c80 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.290926] env[61473]: DEBUG nova.compute.provider_tree [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.303125] env[61473]: DEBUG nova.scheduler.client.report [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has 
not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 675.329489] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.325s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.330114] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 675.405315] env[61473]: DEBUG nova.compute.utils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 675.409881] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 675.409881] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 675.421803] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 675.575443] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 675.621667] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 675.621870] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 675.621932] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.622118] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 675.622269] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.622411] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 675.624015] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 675.624015] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 675.624015] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e 
tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 675.624015] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 675.624015] env[61473]: DEBUG nova.virt.hardware [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 675.624269] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d6c811-6677-4127-a131-067e2a825037 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.633848] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532bc80f-b5c6-41a8-925f-d4601d7133ce {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.929168] env[61473]: DEBUG nova.policy [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba6dfa5ca0c74d02bc8a4951e3dfc911', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90e2c696fdd143598a730850ede006f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 676.190375] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Successfully updated port: 520570e4-302c-42f9-91d0-5b3e506d354c {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 676.210922] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquiring lock "refresh_cache-4703897a-ce8c-4bca-89e9-62fe53c5d404" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.210922] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquired lock "refresh_cache-4703897a-ce8c-4bca-89e9-62fe53c5d404" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.211127] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 
tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 676.284029] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Successfully updated port: 91ee22c0-0b61-4519-b8b3-88982857d750 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 676.318882] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquiring lock "refresh_cache-f0aae1f8-14cb-4fd5-900b-1b062c7f6783" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.319224] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquired lock "refresh_cache-f0aae1f8-14cb-4fd5-900b-1b062c7f6783" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.319544] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 676.505720] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.636938] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 677.008108] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Successfully created port: 87afae59-2053-45fe-99c0-071f8dd27793 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.323754] env[61473]: DEBUG nova.compute.manager [req-b37cf0cb-bbc1-427c-9a46-665cfa4efb6c req-e4d74c07-c92e-4c6c-9aea-688962b83027 service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Received event network-vif-plugged-520570e4-302c-42f9-91d0-5b3e506d354c {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 677.324044] env[61473]: DEBUG oslo_concurrency.lockutils [req-b37cf0cb-bbc1-427c-9a46-665cfa4efb6c req-e4d74c07-c92e-4c6c-9aea-688962b83027 service nova] Acquiring lock "4703897a-ce8c-4bca-89e9-62fe53c5d404-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.324198] env[61473]: DEBUG oslo_concurrency.lockutils [req-b37cf0cb-bbc1-427c-9a46-665cfa4efb6c req-e4d74c07-c92e-4c6c-9aea-688962b83027 service nova] Lock "4703897a-ce8c-4bca-89e9-62fe53c5d404-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.324377] env[61473]: DEBUG oslo_concurrency.lockutils [req-b37cf0cb-bbc1-427c-9a46-665cfa4efb6c req-e4d74c07-c92e-4c6c-9aea-688962b83027 service nova] Lock "4703897a-ce8c-4bca-89e9-62fe53c5d404-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.324657] env[61473]: DEBUG nova.compute.manager [req-b37cf0cb-bbc1-427c-9a46-665cfa4efb6c req-e4d74c07-c92e-4c6c-9aea-688962b83027 service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] No waiting events found dispatching network-vif-plugged-520570e4-302c-42f9-91d0-5b3e506d354c {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 677.324736] env[61473]: WARNING nova.compute.manager [req-b37cf0cb-bbc1-427c-9a46-665cfa4efb6c req-e4d74c07-c92e-4c6c-9aea-688962b83027 service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Received unexpected event network-vif-plugged-520570e4-302c-42f9-91d0-5b3e506d354c for instance with vm_state building and task_state spawning.
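The WARNING above is benign during spawn: Neutron delivered network-vif-plugged before the compute manager registered a waiter for it, so pop_instance_event found nothing to dispatch and the event was logged as unexpected. A minimal Python sketch of that pop-or-warn bookkeeping follows; the names mirror the log entries, but this is an illustration, not Nova's actual implementation.

    # Illustrative sketch only; not Nova's actual InstanceEvents code.
    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()        # stands in for the "<uuid>-events" lock
            self._events = defaultdict(dict)     # instance uuid -> {event name: Event}

        def prepare_for_event(self, uuid, name):
            ev = threading.Event()
            with self._lock:
                self._events[uuid][name] = ev    # a waiter registered before plugging
            return ev

        def pop_instance_event(self, uuid, name):
            with self._lock:                     # the Acquiring/acquired/"released" lines
                return self._events[uuid].pop(name, None)

    def external_instance_event(events, uuid, name):
        ev = events.pop_instance_event(uuid, name)
        if ev is None:
            print('Received unexpected event %s' % name)  # no waiter yet: log and drop
        else:
            ev.set()                             # wake the thread blocked in spawn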
[ 677.408152] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Successfully updated port: a6508f95-2e4d-44d5-a82e-b4b7a3b4afae {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 677.440376] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquiring lock "refresh_cache-5a1247fd-0053-444a-bb93-2ff419d9e102" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.440376] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquired lock "refresh_cache-5a1247fd-0053-444a-bb93-2ff419d9e102" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.440376] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 677.712281] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 677.788362] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Updating instance_info_cache with network_info: [{"id": "520570e4-302c-42f9-91d0-5b3e506d354c", "address": "fa:16:3e:0b:cb:65", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap520570e4-30", "ovs_interfaceid": "520570e4-302c-42f9-91d0-5b3e506d354c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.807783] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Releasing lock "refresh_cache-4703897a-ce8c-4bca-89e9-62fe53c5d404" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.808131] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Instance network_info: |[{"id": "520570e4-302c-42f9-91d0-5b3e506d354c", "address": "fa:16:3e:0b:cb:65", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap520570e4-30", "ovs_interfaceid": "520570e4-302c-42f9-91d0-5b3e506d354c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 677.808659] 
env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:cb:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '520570e4-302c-42f9-91d0-5b3e506d354c', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.822415] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 677.823954] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Updating instance_info_cache with network_info: [{"id": "91ee22c0-0b61-4519-b8b3-88982857d750", "address": "fa:16:3e:6f:47:2a", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ee22c0-0b", "ovs_interfaceid": "91ee22c0-0b61-4519-b8b3-88982857d750", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.825185] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-543a0573-4728-47b9-8ab7-b7a43f8c8a80 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.839882] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Releasing lock "refresh_cache-f0aae1f8-14cb-4fd5-900b-1b062c7f6783" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.840281] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Instance network_info: |[{"id": "91ee22c0-0b61-4519-b8b3-88982857d750", "address": "fa:16:3e:6f:47:2a", 
"network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ee22c0-0b", "ovs_interfaceid": "91ee22c0-0b61-4519-b8b3-88982857d750", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 677.842182] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:47:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91ee22c0-0b61-4519-b8b3-88982857d750', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.849842] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 677.850632] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Created folder: OpenStack in parent group-v4. [ 677.850896] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Creating folder: Project (ab22d0685dc14f2088749991e0351228). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 677.851478] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9301cb33-c8a5-4d87-a35e-9a55b3e36370 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.853488] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5d15cad-3cdd-47cb-9da7-a3c1bc0ded18 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.867453] env[61473]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 677.867722] env[61473]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61473) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 677.868150] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.868415] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Creating folder: Project (b322bc8e088f4bd59b862253fad1aadf). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 677.868690] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-488da269-117b-460c-8e17-117e43026df5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.872847] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Created folder: Project (ab22d0685dc14f2088749991e0351228) in parent group-v843485. [ 677.873104] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Creating folder: Instances. Parent ref: group-v843486. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 677.873374] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6e02dd3-5043-4cfa-85b3-0910d7b4236b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.879492] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Created folder: Project (b322bc8e088f4bd59b862253fad1aadf) in parent group-v843485. [ 677.879744] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Creating folder: Instances. Parent ref: group-v843487. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 677.880016] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17a4835e-260d-4949-a4c7-32630a2ba679 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.884490] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Created folder: Instances in parent group-v843486. [ 677.884824] env[61473]: DEBUG oslo.service.loopingcall [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 677.885086] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 677.885343] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70a611f4-fcd8-475d-91b4-a1324ca56b20 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.904932] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Created folder: Instances in parent group-v843487. [ 677.905458] env[61473]: DEBUG oslo.service.loopingcall [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 677.906126] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 677.906412] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68605379-929b-438b-b420-40ea27939532 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.927323] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 677.927323] env[61473]: value = "task-4281497" [ 677.927323] env[61473]: _type = "Task" [ 677.927323] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.933787] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 677.933787] env[61473]: value = "task-4281498" [ 677.933787] env[61473]: _type = "Task" [ 677.933787] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.938706] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281497, 'name': CreateVM_Task} progress is 10%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.948780] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281498, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.442360] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281497, 'name': CreateVM_Task, 'duration_secs': 0.353746} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.444979] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 678.453973] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281498, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.474138] env[61473]: DEBUG oslo_vmware.service [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e0c9cb-3501-4b7a-93bc-ea49340f7a17 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.483587] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.483767] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.484575] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 678.484772] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c1f4dfe-5de2-48ad-bf4e-99e72ad7b38e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.490648] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Waiting for the task: (returnval){ [ 678.490648] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52f3f463-dde9-df11-358e-d6c770d83c8e" [ 678.490648] env[61473]: _type = "Task" [ 678.490648] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.504314] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52f3f463-dde9-df11-358e-d6c770d83c8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.666828] env[61473]: DEBUG nova.compute.manager [req-9937cdb2-34da-4932-99cf-31f0988192f1 req-4161153d-2ad3-4c60-94ea-b0d35c6048b5 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Received event network-vif-plugged-a6508f95-2e4d-44d5-a82e-b4b7a3b4afae {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 678.667050] env[61473]: DEBUG oslo_concurrency.lockutils [req-9937cdb2-34da-4932-99cf-31f0988192f1 req-4161153d-2ad3-4c60-94ea-b0d35c6048b5 service nova] Acquiring lock "5a1247fd-0053-444a-bb93-2ff419d9e102-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.667262] env[61473]: DEBUG oslo_concurrency.lockutils [req-9937cdb2-34da-4932-99cf-31f0988192f1 req-4161153d-2ad3-4c60-94ea-b0d35c6048b5 service nova] Lock "5a1247fd-0053-444a-bb93-2ff419d9e102-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.667433] env[61473]: DEBUG oslo_concurrency.lockutils [req-9937cdb2-34da-4932-99cf-31f0988192f1 req-4161153d-2ad3-4c60-94ea-b0d35c6048b5 service nova] Lock "5a1247fd-0053-444a-bb93-2ff419d9e102-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.667601] env[61473]: DEBUG nova.compute.manager [req-9937cdb2-34da-4932-99cf-31f0988192f1 req-4161153d-2ad3-4c60-94ea-b0d35c6048b5 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] No waiting events found dispatching network-vif-plugged-a6508f95-2e4d-44d5-a82e-b4b7a3b4afae {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 678.667760] env[61473]: WARNING nova.compute.manager [req-9937cdb2-34da-4932-99cf-31f0988192f1 req-4161153d-2ad3-4c60-94ea-b0d35c6048b5 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Received unexpected event network-vif-plugged-a6508f95-2e4d-44d5-a82e-b4b7a3b4afae for instance with vm_state building and task_state spawning. [ 678.950827] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Successfully created port: 8c86498c-1c65-4c5b-b786-6fe92233129a {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 678.961687] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281498, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.005322] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.005502] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.005750] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.005896] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.006362] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.006628] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cfd3c9e3-03e7-4b06-9d55-30392df7f49a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.014347] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.014490] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 679.015327] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4919a5-e799-48af-9982-af312b840b7f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.022015] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d8ef1ae-78f5-4e34-91ce-2796b939ee1e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.027250] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Waiting for the task: (returnval){ [ 679.027250] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]524c1fdc-5ac5-5253-3a6a-5cd711f6f82c" [ 679.027250] env[61473]: _type = "Task" [ 679.027250] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.035656] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]524c1fdc-5ac5-5253-3a6a-5cd711f6f82c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.055581] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Updating instance_info_cache with network_info: [{"id": "a6508f95-2e4d-44d5-a82e-b4b7a3b4afae", "address": "fa:16:3e:9a:30:2f", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6508f95-2e", "ovs_interfaceid": "a6508f95-2e4d-44d5-a82e-b4b7a3b4afae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.068224] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Releasing lock "refresh_cache-5a1247fd-0053-444a-bb93-2ff419d9e102" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.068386] 
env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Instance network_info: |[{"id": "a6508f95-2e4d-44d5-a82e-b4b7a3b4afae", "address": "fa:16:3e:9a:30:2f", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6508f95-2e", "ovs_interfaceid": "a6508f95-2e4d-44d5-a82e-b4b7a3b4afae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 679.068794] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:30:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6508f95-2e4d-44d5-a82e-b4b7a3b4afae', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.077377] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Creating folder: Project (414d6bce704b4427bfbad8fdb7a414d8). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 679.078369] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0a61e1b-18f0-40b1-be54-b6aa5f70fea6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.088962] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Created folder: Project (414d6bce704b4427bfbad8fdb7a414d8) in parent group-v843485. [ 679.089190] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Creating folder: Instances. Parent ref: group-v843492. 
{{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 679.089427] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62a997c0-6cde-4334-9f54-f8c940cd38c5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.097953] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Created folder: Instances in parent group-v843492. [ 679.098183] env[61473]: DEBUG oslo.service.loopingcall [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.098367] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 679.098562] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30cc681f-4442-42d3-8925-57cf182a3401 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.124547] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 679.124547] env[61473]: value = "task-4281501" [ 679.124547] env[61473]: _type = "Task" [ 679.124547] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.132937] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281501, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.455848] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281498, 'name': CreateVM_Task, 'duration_secs': 1.352153} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.456401] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 679.457404] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.457404] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.460035] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 679.460035] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d5c75e8-12aa-4eb8-98de-6cbdf1c3ec2c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.465553] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Waiting for the task: (returnval){ [ 679.465553] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52c75a07-0b35-bbd0-1d8a-f96a52a81fb4" [ 679.465553] env[61473]: _type = "Task" [ 679.465553] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.478035] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52c75a07-0b35-bbd0-1d8a-f96a52a81fb4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.544479] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 679.544479] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Creating directory with path [datastore2] vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.544479] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b46446c-823b-4ede-9b04-2fccae5d3d04 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.563502] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Created directory with path [datastore2] vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.563991] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Fetch image to [datastore2] vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 679.563991] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 679.566941] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d417939-fe79-4e77-9fbc-2a436ec6b293 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.572491] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d5761f-9c9d-4430-a43d-1ddaa2557086 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.585723] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7992fd74-62fa-4e02-8e52-6997db79ab0a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.630157] env[61473]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da577819-2d7f-48bc-a4c7-3401bcba1a8a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.639923] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ca2f1408-d0cb-4a40-8533-b85503dcf462 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.641939] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281501, 'name': CreateVM_Task, 'duration_secs': 0.312011} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.642178] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 679.643210] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.676956] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 679.760516] env[61473]: DEBUG oslo_vmware.rw_handles [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 679.825639] env[61473]: DEBUG oslo_vmware.rw_handles [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 679.825900] env[61473]: DEBUG oslo_vmware.rw_handles [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 679.872152] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Successfully updated port: 39689a89-cd1a-49c1-9960-b462a5c81961 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 679.889125] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "refresh_cache-28261a41-7f6d-495c-abbd-7f73f67e80d6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.889286] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquired lock "refresh_cache-28261a41-7f6d-495c-abbd-7f73f67e80d6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.889500] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 679.981363] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.981631] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.981847] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.982085] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.982444] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquired external 
semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 679.982669] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69d6d9d1-ae6f-4360-a5a8-976d8044b9fa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.991720] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Waiting for the task: (returnval){ [ 679.991720] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52bd8f78-00c0-4ada-c940-a0e702c18f6a" [ 679.991720] env[61473]: _type = "Task" [ 679.991720] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.001415] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52bd8f78-00c0-4ada-c940-a0e702c18f6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.010823] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.170308] env[61473]: DEBUG nova.compute.manager [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Received event network-changed-a6508f95-2e4d-44d5-a82e-b4b7a3b4afae {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 680.170308] env[61473]: DEBUG nova.compute.manager [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Refreshing instance network info cache due to event network-changed-a6508f95-2e4d-44d5-a82e-b4b7a3b4afae. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 680.170308] env[61473]: DEBUG oslo_concurrency.lockutils [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] Acquiring lock "refresh_cache-5a1247fd-0053-444a-bb93-2ff419d9e102" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.170308] env[61473]: DEBUG oslo_concurrency.lockutils [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] Acquired lock "refresh_cache-5a1247fd-0053-444a-bb93-2ff419d9e102" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.170308] env[61473]: DEBUG nova.network.neutron [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Refreshing network info cache for port a6508f95-2e4d-44d5-a82e-b4b7a3b4afae {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 680.506218] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.506218] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 680.506218] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.792033] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Updating instance_info_cache with network_info: [{"id": "39689a89-cd1a-49c1-9960-b462a5c81961", "address": "fa:16:3e:df:fb:34", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap39689a89-cd", "ovs_interfaceid": "39689a89-cd1a-49c1-9960-b462a5c81961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.815750] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Releasing lock "refresh_cache-28261a41-7f6d-495c-abbd-7f73f67e80d6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.816023] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Instance network_info: |[{"id": "39689a89-cd1a-49c1-9960-b462a5c81961", "address": "fa:16:3e:df:fb:34", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39689a89-cd", "ovs_interfaceid": "39689a89-cd1a-49c1-9960-b462a5c81961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 680.816708] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:fb:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39689a89-cd1a-49c1-9960-b462a5c81961', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.827494] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Creating folder: Project (c5783e3416f14b31a8061891e753e939). Parent ref: group-v843485. 
{{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 680.828267] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28ab8d69-b5f1-4119-9173-a3d951c49df6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.842293] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Created folder: Project (c5783e3416f14b31a8061891e753e939) in parent group-v843485. [ 680.842501] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Creating folder: Instances. Parent ref: group-v843495. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 680.846120] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8ebca5d-349a-4805-9cf8-7d807f254566 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.856404] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Created folder: Instances in parent group-v843495. [ 680.856787] env[61473]: DEBUG oslo.service.loopingcall [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.856877] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 680.858526] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0ead0cc-4ef8-4d55-bd79-f5ad7dca0ed4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.891208] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.891208] env[61473]: value = "task-4281504" [ 680.891208] env[61473]: _type = "Task" [ 680.891208] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.895241] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Successfully updated port: 87afae59-2053-45fe-99c0-071f8dd27793 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.912212] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281504, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.914703] env[61473]: DEBUG nova.compute.manager [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Received event network-vif-plugged-91ee22c0-0b61-4519-b8b3-88982857d750 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 680.914980] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Acquiring lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.918901] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.919037] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.004s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.919416] env[61473]: DEBUG nova.compute.manager [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] No waiting events found dispatching network-vif-plugged-91ee22c0-0b61-4519-b8b3-88982857d750 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 680.919416] env[61473]: WARNING nova.compute.manager [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Received unexpected event network-vif-plugged-91ee22c0-0b61-4519-b8b3-88982857d750 for instance with vm_state building and task_state spawning. [ 680.919545] env[61473]: DEBUG nova.compute.manager [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Received event network-changed-520570e4-302c-42f9-91d0-5b3e506d354c {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 680.919708] env[61473]: DEBUG nova.compute.manager [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Refreshing instance network info cache due to event network-changed-520570e4-302c-42f9-91d0-5b3e506d354c. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 680.919909] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Acquiring lock "refresh_cache-4703897a-ce8c-4bca-89e9-62fe53c5d404" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.920052] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Acquired lock "refresh_cache-4703897a-ce8c-4bca-89e9-62fe53c5d404" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.920223] env[61473]: DEBUG nova.network.neutron [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Refreshing network info cache for port 520570e4-302c-42f9-91d0-5b3e506d354c {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 680.924985] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "refresh_cache-079f1dc7-232a-4e21-9b0e-9fff2d16bab6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.924985] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "refresh_cache-079f1dc7-232a-4e21-9b0e-9fff2d16bab6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.924985] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 681.098039] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.193525] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "dc8b5106-5657-409b-b425-b929c8e893d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.197023] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "dc8b5106-5657-409b-b425-b929c8e893d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.211741] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 681.296238] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.296238] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.299356] env[61473]: INFO nova.compute.claims [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.371738] env[61473]: DEBUG nova.network.neutron [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Updated VIF entry in instance network info cache for port a6508f95-2e4d-44d5-a82e-b4b7a3b4afae. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 681.372124] env[61473]: DEBUG nova.network.neutron [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Updating instance_info_cache with network_info: [{"id": "a6508f95-2e4d-44d5-a82e-b4b7a3b4afae", "address": "fa:16:3e:9a:30:2f", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6508f95-2e", "ovs_interfaceid": "a6508f95-2e4d-44d5-a82e-b4b7a3b4afae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.388262] env[61473]: DEBUG oslo_concurrency.lockutils [req-5c6ca02f-9928-4fcf-b6c9-8b0c9b18594d req-5a30c7f3-836e-4864-88cd-71f60034a780 service nova] Releasing lock "refresh_cache-5a1247fd-0053-444a-bb93-2ff419d9e102" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.403851] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281504, 'name': CreateVM_Task, 'duration_secs': 0.358304} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.404628] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 681.405371] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.405528] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.405835] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 681.406095] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21009e25-70fe-4cb5-8529-a899a42a0ac4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.412472] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Waiting for the task: (returnval){ [ 681.412472] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5233b760-52b5-b655-1397-13cd2d73ddfd" [ 681.412472] env[61473]: _type = "Task" [ 681.412472] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.424465] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5233b760-52b5-b655-1397-13cd2d73ddfd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.581988] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06647178-113c-4ba3-a65c-e57c729f1104 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.594831] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb875d0b-f4cd-46e1-a5d2-b7d62da1d81b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.629855] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b2dc69-ca4b-4305-ba88-d8a00b7a9d9f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.637392] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249473f1-c4f4-41e3-a4a6-2b5b64f92406 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.650678] env[61473]: DEBUG nova.compute.provider_tree [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.667481] env[61473]: DEBUG nova.scheduler.client.report [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 681.688020] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.390s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.688020] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 681.742019] env[61473]: DEBUG nova.compute.utils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 681.742392] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Not allocating networking since 'none' was specified. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 681.755160] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 681.869036] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 681.909999] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 681.910483] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 681.910552] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.910695] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 681.911055] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.911055] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 681.911207] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 681.911360] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 681.911518] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 681.911672] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 681.911833] env[61473]: DEBUG nova.virt.hardware [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 681.914155] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b811ea73-e57c-4314-be07-79e083c52c1f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.932625] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add248b8-1879-4a75-9c09-cc07b967b442 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.938095] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.938357] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None 
req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.938599] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.949630] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance VIF info [] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 681.956290] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Creating folder: Project (cf0686e0082d47a9a7abc511cb0f1ab2). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 681.956694] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd811bde-2daa-41d2-ab4b-27233bdcd15f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.968911] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Created folder: Project (cf0686e0082d47a9a7abc511cb0f1ab2) in parent group-v843485. [ 681.969139] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Creating folder: Instances. Parent ref: group-v843498. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 681.969812] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b2a238d-fa12-4c28-810d-7ec8dc98c94f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.980488] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Created folder: Instances in parent group-v843498. [ 681.980488] env[61473]: DEBUG oslo.service.loopingcall [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 681.980700] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 681.980956] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4555745c-b587-4982-918a-e21f9bf73746 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.995939] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Successfully updated port: 8c86498c-1c65-4c5b-b786-6fe92233129a {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 682.003207] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.003207] env[61473]: value = "task-4281507" [ 682.003207] env[61473]: _type = "Task" [ 682.003207] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.012661] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "refresh_cache-5d67907c-7199-4734-a5cc-4466703eaa51" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.012989] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired lock "refresh_cache-5d67907c-7199-4734-a5cc-4466703eaa51" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.013113] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 682.022945] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281507, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.110352] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.110699] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.110904] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 682.111036] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 682.139902] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 682.140074] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 682.140225] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 682.140352] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 682.140472] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 682.140587] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 682.140718] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 682.140840] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 682.142505] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.142762] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.142966] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.143188] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.143378] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.143569] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_power_states {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.173676] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Getting list of instances from cluster (obj){ [ 682.173676] env[61473]: value = "domain-c8" [ 682.173676] env[61473]: _type = "ClusterComputeResource" [ 682.173676] env[61473]: } {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 682.175144] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ce61b1-f8d2-48c7-8940-e0da01824933 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.192877] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Got total of 4 instances {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 682.193138] env[61473]: WARNING nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] While synchronizing instance power states, found 7 instances in the database and 4 instances on the hypervisor. 
[ 682.193310] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 4703897a-ce8c-4bca-89e9-62fe53c5d404 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 682.193869] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 5a1247fd-0053-444a-bb93-2ff419d9e102 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 682.194126] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid f0aae1f8-14cb-4fd5-900b-1b062c7f6783 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 682.194586] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 28261a41-7f6d-495c-abbd-7f73f67e80d6 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 682.194586] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 682.194704] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 5d67907c-7199-4734-a5cc-4466703eaa51 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 682.195341] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid dc8b5106-5657-409b-b425-b929c8e893d5 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 682.195341] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "4703897a-ce8c-4bca-89e9-62fe53c5d404" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.195445] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "5a1247fd-0053-444a-bb93-2ff419d9e102" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.195580] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.195781] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.196451] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.196451] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "5d67907c-7199-4734-a5cc-4466703eaa51" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.196451] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "dc8b5106-5657-409b-b425-b929c8e893d5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.196695] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.196695] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 682.196852] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.214582] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.214582] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.214582] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.214582] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 682.215627] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e695ca8-5120-4ad3-afcb-1d8fcaccb4fe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.226104] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-13fe3cf8-a3d8-401f-a63d-42a0e5e74101 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.244390] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3022ab21-f2a5-4501-b267-e1dfa3906367 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.251782] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310dba11-e746-4b7f-a3a3-27cefad5e1fc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.287361] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180635MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 682.287532] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.287740] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.290457] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.300040] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Updating instance_info_cache with network_info: [{"id": "87afae59-2053-45fe-99c0-071f8dd27793", "address": "fa:16:3e:50:6e:0a", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87afae59-20", "ovs_interfaceid": "87afae59-2053-45fe-99c0-071f8dd27793", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.315032] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "refresh_cache-079f1dc7-232a-4e21-9b0e-9fff2d16bab6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.315032] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Instance network_info: |[{"id": "87afae59-2053-45fe-99c0-071f8dd27793", "address": "fa:16:3e:50:6e:0a", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87afae59-20", "ovs_interfaceid": "87afae59-2053-45fe-99c0-071f8dd27793", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 682.315238] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:6e:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87afae59-2053-45fe-99c0-071f8dd27793', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.322173] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating folder: Project (1b5fd5d032e047b8b77b2b727a03f01c). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 682.326157] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e616ff1-659a-48a7-bc78-91b7d58800e0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.340903] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created folder: Project (1b5fd5d032e047b8b77b2b727a03f01c) in parent group-v843485. [ 682.341861] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating folder: Instances. Parent ref: group-v843501. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 682.341861] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adb2a22f-89f3-4c1a-8cc3-c39cab7f8dff {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.352030] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created folder: Instances in parent group-v843501. [ 682.352386] env[61473]: DEBUG oslo.service.loopingcall [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.354537] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 682.354537] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc13d9a6-d801-4b04-98a6-1424789bc4a8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.385327] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.385327] env[61473]: value = "task-4281510" [ 682.385327] env[61473]: _type = "Task" [ 682.385327] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.393029] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281510, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.407564] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4703897a-ce8c-4bca-89e9-62fe53c5d404 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.407715] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5a1247fd-0053-444a-bb93-2ff419d9e102 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.408198] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f0aae1f8-14cb-4fd5-900b-1b062c7f6783 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.408198] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 28261a41-7f6d-495c-abbd-7f73f67e80d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.408198] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.408198] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5d67907c-7199-4734-a5cc-4466703eaa51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.408367] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dc8b5106-5657-409b-b425-b929c8e893d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.408515] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 682.408657] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=183GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 682.521994] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281507, 'name': CreateVM_Task, 'duration_secs': 0.305904} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.523057] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 682.523295] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.523378] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.523726] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 682.524681] env[61473]: DEBUG nova.network.neutron [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Updated VIF entry in instance network info cache for port 520570e4-302c-42f9-91d0-5b3e506d354c. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 682.524974] env[61473]: DEBUG nova.network.neutron [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Updating instance_info_cache with network_info: [{"id": "520570e4-302c-42f9-91d0-5b3e506d354c", "address": "fa:16:3e:0b:cb:65", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap520570e4-30", "ovs_interfaceid": "520570e4-302c-42f9-91d0-5b3e506d354c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.526135] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2be9ba49-0660-4adb-b952-8f971357d4e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.536145] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Waiting for the task: (returnval){ [ 682.536145] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52b71db6-744f-c13a-2768-f7ac2563bb04" [ 682.536145] env[61473]: _type = "Task" [ 682.536145] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.541734] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Releasing lock "refresh_cache-4703897a-ce8c-4bca-89e9-62fe53c5d404" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.542135] env[61473]: DEBUG nova.compute.manager [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Received event network-changed-91ee22c0-0b61-4519-b8b3-88982857d750 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 682.542504] env[61473]: DEBUG nova.compute.manager [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Refreshing instance network info cache due to event network-changed-91ee22c0-0b61-4519-b8b3-88982857d750. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 682.542865] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Acquiring lock "refresh_cache-f0aae1f8-14cb-4fd5-900b-1b062c7f6783" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.545516] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Acquired lock "refresh_cache-f0aae1f8-14cb-4fd5-900b-1b062c7f6783" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.545516] env[61473]: DEBUG nova.network.neutron [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Refreshing network info cache for port 91ee22c0-0b61-4519-b8b3-88982857d750 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 682.554377] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.556079] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 682.557267] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.583193] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bad76f0-f268-4c6d-9c55-434a52055d5f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.596204] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da0148e-a41f-4b46-926f-977f1f2f9c48 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.634130] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f57265f-7825-4064-b825-677382e93c33 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.641736] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991d22ea-f923-400d-9abb-053b49e629a3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.660486] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde 
None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.672726] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.692032] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 682.693122] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.404s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.693122] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 682.693122] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Getting list of instances from cluster (obj){ [ 682.693122] env[61473]: value = "domain-c8" [ 682.693122] env[61473]: _type = "ClusterComputeResource" [ 682.693122] env[61473]: } {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 682.694055] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7517f5-4789-450c-90cd-2c979e686cbe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.708944] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Got total of 6 instances {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 682.863632] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.863864] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.876165] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 682.895109] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281510, 'name': CreateVM_Task, 'duration_secs': 0.310955} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.895683] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 682.896379] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.896586] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.897019] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 682.897164] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2bacdf6-0dd0-4a7d-aaac-edf5b87a5efc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.902357] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 682.902357] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]526e8c67-2570-63b3-5315-82a2a3dc95ea" [ 682.902357] env[61473]: _type = "Task" [ 682.902357] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.912062] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]526e8c67-2570-63b3-5315-82a2a3dc95ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.940367] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.940613] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.942854] env[61473]: INFO nova.compute.claims [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.057165] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Updating instance_info_cache with network_info: [{"id": "8c86498c-1c65-4c5b-b786-6fe92233129a", "address": "fa:16:3e:fa:3e:19", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c86498c-1c", "ovs_interfaceid": "8c86498c-1c65-4c5b-b786-6fe92233129a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.082195] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Releasing lock "refresh_cache-5d67907c-7199-4734-a5cc-4466703eaa51" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.082523] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Instance network_info: |[{"id": "8c86498c-1c65-4c5b-b786-6fe92233129a", "address": "fa:16:3e:fa:3e:19", 
"network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c86498c-1c", "ovs_interfaceid": "8c86498c-1c65-4c5b-b786-6fe92233129a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 683.083598] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:3e:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c86498c-1c65-4c5b-b786-6fe92233129a', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.093240] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating folder: Project (90e2c696fdd143598a730850ede006f6). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 683.094214] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-950cad2e-cf37-4049-ae28-356230ee9c6f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.105237] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Created folder: Project (90e2c696fdd143598a730850ede006f6) in parent group-v843485. [ 683.105491] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating folder: Instances. Parent ref: group-v843504. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 683.105756] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-168c3c6b-dd61-467c-86cb-baf76b330ace {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.114111] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Created folder: Instances in parent group-v843504. 
[ 683.114394] env[61473]: DEBUG oslo.service.loopingcall [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 683.114586] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 683.114783] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7b122c1-dd5e-46d1-b156-c403508b3b30 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.147396] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.147396] env[61473]: value = "task-4281513" [ 683.147396] env[61473]: _type = "Task" [ 683.147396] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.157040] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281513, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.208764] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20110e14-ca76-4847-aa4c-3a39652b835d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.217611] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf6c95a-60f7-44bb-8ecf-043169659f38 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.262024] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9adcb14-381d-41fd-8856-5718923abf80 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.273769] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1431190-7713-4615-9f01-9f31649a700b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.291418] env[61473]: DEBUG nova.compute.provider_tree [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.307262] env[61473]: DEBUG nova.scheduler.client.report [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.326953] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.386s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.327396] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 683.371982] env[61473]: DEBUG nova.compute.utils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 683.373524] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 683.373696] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 683.388054] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Start building block device mappings for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 683.414763] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.415217] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.415331] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.418630] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.418844] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.446741] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 683.505542] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 683.531344] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.531607] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.533434] env[61473]: INFO nova.compute.claims [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.546242] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 683.546484] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 683.546672] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.546860] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 683.547013] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Image pref 0:0:0 {{(pid=61473) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.547657] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 683.547875] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 683.548050] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 683.548223] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 683.548385] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 683.548646] env[61473]: DEBUG nova.virt.hardware [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 683.549585] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b883822c-a7ce-4ffe-be54-4ab5a3a13c33 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.558857] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d849e3de-8b73-4dee-8d8b-5e91574a5e6b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.564522] env[61473]: DEBUG nova.policy [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8d4c400e2914b20a65740acf0f83eac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f7852f1d30c4031b9e98dc36cf1a10c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 683.643594] env[61473]: DEBUG nova.network.neutron [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Updated VIF entry in instance network info cache for port 91ee22c0-0b61-4519-b8b3-88982857d750. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 683.643594] env[61473]: DEBUG nova.network.neutron [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Updating instance_info_cache with network_info: [{"id": "91ee22c0-0b61-4519-b8b3-88982857d750", "address": "fa:16:3e:6f:47:2a", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ee22c0-0b", "ovs_interfaceid": "91ee22c0-0b61-4519-b8b3-88982857d750", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.657029] env[61473]: DEBUG oslo_concurrency.lockutils [req-1fd819d3-e91e-4c39-a4bd-dddb4a2b8110 req-4c5444a3-569c-4d00-877e-97c9b71ad75e service nova] Releasing lock "refresh_cache-f0aae1f8-14cb-4fd5-900b-1b062c7f6783" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.660444] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281513, 'name': CreateVM_Task, 'duration_secs': 0.382841} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.660618] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 683.661291] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.661527] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.661740] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 683.661995] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1913b9eb-bc8e-47d7-8e96-ba17d76f28c8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.668286] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){ [ 683.668286] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]523c2065-26a1-6190-cca1-629522976623" [ 683.668286] env[61473]: _type = "Task" [ 683.668286] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.680926] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]523c2065-26a1-6190-cca1-629522976623, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.768893] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c073d5-33c0-49ab-b437-df909d51e08c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.778282] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d5889f-6054-42c4-abb7-90a963e2d0f3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.811681] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270fc05b-a6b3-4ca4-a87f-eec9d8ef7e5d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.819280] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ace9220-4ede-438b-bc5d-be3f3125c573 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.832802] env[61473]: DEBUG nova.compute.provider_tree [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.843770] env[61473]: DEBUG nova.scheduler.client.report [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.861677] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.862261] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 683.901460] env[61473]: DEBUG nova.compute.utils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 683.905360] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 683.905594] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 683.916827] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 683.986842] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 684.017814] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 684.018079] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 684.018242] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.018425] env[61473]: 
DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 684.018638] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.018759] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 684.018903] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 684.019361] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 684.019444] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 684.019594] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 684.019859] env[61473]: DEBUG nova.virt.hardware [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 684.021012] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50fb0af-aac0-4963-b319-241f30dcb79d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.029591] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec484e92-e80a-46e6-880c-4414ef080ae5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.115206] env[61473]: DEBUG nova.policy [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4e03ca432054866b1defb60a2d1a8d2', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e19e7fc95e94ca7afc29a38b935617e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 684.178967] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.179231] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.179392] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.389051] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Successfully created port: a29a964d-9929-4324-81de-e9bd1f83b841 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.071585] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Received event network-vif-plugged-39689a89-cd1a-49c1-9960-b462a5c81961 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 685.072069] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquiring lock "28261a41-7f6d-495c-abbd-7f73f67e80d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.072069] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.075578] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.075578] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] No waiting events found dispatching network-vif-plugged-39689a89-cd1a-49c1-9960-b462a5c81961 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 685.075578] env[61473]: WARNING nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Received unexpected event network-vif-plugged-39689a89-cd1a-49c1-9960-b462a5c81961 for instance with vm_state building and task_state spawning. [ 685.075578] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Received event network-changed-39689a89-cd1a-49c1-9960-b462a5c81961 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 685.075882] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Refreshing instance network info cache due to event network-changed-39689a89-cd1a-49c1-9960-b462a5c81961. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 685.075882] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquiring lock "refresh_cache-28261a41-7f6d-495c-abbd-7f73f67e80d6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.075882] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquired lock "refresh_cache-28261a41-7f6d-495c-abbd-7f73f67e80d6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.075882] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Refreshing network info cache for port 39689a89-cd1a-49c1-9960-b462a5c81961 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 685.478227] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Successfully created port: 99f2bc54-f8a8-410e-a336-6f37bc23af30 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 686.361294] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Updated VIF entry in instance network info cache for port 39689a89-cd1a-49c1-9960-b462a5c81961. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 686.361675] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Updating instance_info_cache with network_info: [{"id": "39689a89-cd1a-49c1-9960-b462a5c81961", "address": "fa:16:3e:df:fb:34", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.132", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39689a89-cd", "ovs_interfaceid": "39689a89-cd1a-49c1-9960-b462a5c81961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.387294] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Releasing lock "refresh_cache-28261a41-7f6d-495c-abbd-7f73f67e80d6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.388106] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Received event network-vif-plugged-87afae59-2053-45fe-99c0-071f8dd27793 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 686.388106] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquiring lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.388106] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.388106] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.388265] env[61473]: DEBUG nova.compute.manager 
[req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] No waiting events found dispatching network-vif-plugged-87afae59-2053-45fe-99c0-071f8dd27793 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 686.388433] env[61473]: WARNING nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Received unexpected event network-vif-plugged-87afae59-2053-45fe-99c0-071f8dd27793 for instance with vm_state building and task_state spawning. [ 686.389338] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Received event network-changed-87afae59-2053-45fe-99c0-071f8dd27793 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 686.389338] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Refreshing instance network info cache due to event network-changed-87afae59-2053-45fe-99c0-071f8dd27793. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 686.389338] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquiring lock "refresh_cache-079f1dc7-232a-4e21-9b0e-9fff2d16bab6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.389338] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquired lock "refresh_cache-079f1dc7-232a-4e21-9b0e-9fff2d16bab6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.389338] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Refreshing network info cache for port 87afae59-2053-45fe-99c0-071f8dd27793 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 686.560101] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.560101] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.580291] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 
tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 686.669804] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.670125] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.671963] env[61473]: INFO nova.compute.claims [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.973278] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04474338-98f8-4b0e-9ba1-35aeb34a50a8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.983339] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b119a710-e763-452f-b5e1-6e5f0d208dd6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.019387] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d569f30a-1afe-417c-b56a-8f0cea66ab5e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.027092] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9ab8d7-94ac-4418-88a7-cf55bc5fa539 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.044194] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "c6880758-25cf-4078-9455-827db6fb6435" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.049760] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "c6880758-25cf-4078-9455-827db6fb6435" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.049760] env[61473]: DEBUG nova.compute.provider_tree [None req-84fe5482-cc17-42a3-af18-b82a19dea09e 
tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.059531] env[61473]: DEBUG nova.scheduler.client.report [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.080456] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.409s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.080456] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 687.132286] env[61473]: DEBUG nova.compute.utils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.133614] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 687.133780] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 687.150798] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Start building block device mappings for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 687.256025] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 687.286952] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 687.286952] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 687.286952] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.287199] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 687.287199] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.287199] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 687.287199] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 687.287199] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 687.287415] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 687.287675] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 687.288155] env[61473]: DEBUG nova.virt.hardware [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 687.288728] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaab665a-888d-437b-9d15-a109a9d774ab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.297195] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241ef433-aa82-4fe5-a89a-dd2442bf1724 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.508655] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Updated VIF entry in instance network info cache for port 87afae59-2053-45fe-99c0-071f8dd27793. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 687.508655] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Updating instance_info_cache with network_info: [{"id": "87afae59-2053-45fe-99c0-071f8dd27793", "address": "fa:16:3e:50:6e:0a", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87afae59-20", "ovs_interfaceid": "87afae59-2053-45fe-99c0-071f8dd27793", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.526185] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Releasing lock "refresh_cache-079f1dc7-232a-4e21-9b0e-9fff2d16bab6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.526185] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Received event network-vif-plugged-8c86498c-1c65-4c5b-b786-6fe92233129a {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 687.526185] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquiring lock "5d67907c-7199-4734-a5cc-4466703eaa51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.526185] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Lock "5d67907c-7199-4734-a5cc-4466703eaa51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.526479] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Lock "5d67907c-7199-4734-a5cc-4466703eaa51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.526479] env[61473]: DEBUG 
nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] No waiting events found dispatching network-vif-plugged-8c86498c-1c65-4c5b-b786-6fe92233129a {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 687.526479] env[61473]: WARNING nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Received unexpected event network-vif-plugged-8c86498c-1c65-4c5b-b786-6fe92233129a for instance with vm_state building and task_state spawning. [ 687.526479] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Received event network-changed-8c86498c-1c65-4c5b-b786-6fe92233129a {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 687.526637] env[61473]: DEBUG nova.compute.manager [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Refreshing instance network info cache due to event network-changed-8c86498c-1c65-4c5b-b786-6fe92233129a. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 687.526637] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquiring lock "refresh_cache-5d67907c-7199-4734-a5cc-4466703eaa51" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.526637] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Acquired lock "refresh_cache-5d67907c-7199-4734-a5cc-4466703eaa51" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.526637] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Refreshing network info cache for port 8c86498c-1c65-4c5b-b786-6fe92233129a {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 687.541135] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Successfully updated port: a29a964d-9929-4324-81de-e9bd1f83b841 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.557573] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "refresh_cache-921c348d-b2ed-4a9c-b2cf-bdac15ebff67" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.557573] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquired lock "refresh_cache-921c348d-b2ed-4a9c-b2cf-bdac15ebff67" {{(pid=61473) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.557573] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 687.620697] env[61473]: DEBUG nova.policy [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6668d587e133408d97fa932fb8cce8cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db461a271b874fc8af53d06cfc19dd64', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 687.690042] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 688.660320] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Updating instance_info_cache with network_info: [{"id": "a29a964d-9929-4324-81de-e9bd1f83b841", "address": "fa:16:3e:20:30:81", "network": {"id": "fe484444-1fc8-493e-bdec-2d06f21c2565", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-166180303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f7852f1d30c4031b9e98dc36cf1a10c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa29a964d-99", "ovs_interfaceid": "a29a964d-9929-4324-81de-e9bd1f83b841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.671652] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Releasing lock "refresh_cache-921c348d-b2ed-4a9c-b2cf-bdac15ebff67" {{(pid=61473) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.671950] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Instance network_info: |[{"id": "a29a964d-9929-4324-81de-e9bd1f83b841", "address": "fa:16:3e:20:30:81", "network": {"id": "fe484444-1fc8-493e-bdec-2d06f21c2565", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-166180303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f7852f1d30c4031b9e98dc36cf1a10c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa29a964d-99", "ovs_interfaceid": "a29a964d-9929-4324-81de-e9bd1f83b841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 688.672399] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:30:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a29a964d-9929-4324-81de-e9bd1f83b841', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.682507] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Creating folder: Project (2f7852f1d30c4031b9e98dc36cf1a10c). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 688.683374] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0f1a268-4b22-4bc5-b398-f9f3d8025555 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.694498] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Created folder: Project (2f7852f1d30c4031b9e98dc36cf1a10c) in parent group-v843485. [ 688.694761] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Creating folder: Instances. Parent ref: group-v843507. 
{{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 688.694995] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94ff0868-0414-4103-b940-f842ce4cfb83 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.704226] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Created folder: Instances in parent group-v843507. [ 688.704374] env[61473]: DEBUG oslo.service.loopingcall [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 688.704790] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 688.704998] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-503a97c1-122e-4b6f-8982-37a9da9dd91a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.729139] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.729139] env[61473]: value = "task-4281516" [ 688.729139] env[61473]: _type = "Task" [ 688.729139] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.738312] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281516, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.819151] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Updated VIF entry in instance network info cache for port 8c86498c-1c65-4c5b-b786-6fe92233129a. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 688.819516] env[61473]: DEBUG nova.network.neutron [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Updating instance_info_cache with network_info: [{"id": "8c86498c-1c65-4c5b-b786-6fe92233129a", "address": "fa:16:3e:fa:3e:19", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c86498c-1c", "ovs_interfaceid": "8c86498c-1c65-4c5b-b786-6fe92233129a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.834151] env[61473]: DEBUG oslo_concurrency.lockutils [req-d200c455-3225-4d69-9bb4-592430952e74 req-16f4164f-8220-45fd-935c-de1524792918 service nova] Releasing lock "refresh_cache-5d67907c-7199-4734-a5cc-4466703eaa51" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.879551] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Successfully updated port: 99f2bc54-f8a8-410e-a336-6f37bc23af30 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 688.902930] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "refresh_cache-3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.903096] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired lock "refresh_cache-3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.903250] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 689.001457] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 
tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.110180] env[61473]: DEBUG nova.compute.manager [req-0325c1f1-7ec0-4a3e-8c4a-d858ca277b3a req-2c3000e3-358c-410f-8346-054643c2336a service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Received event network-vif-plugged-a29a964d-9929-4324-81de-e9bd1f83b841 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 689.112775] env[61473]: DEBUG oslo_concurrency.lockutils [req-0325c1f1-7ec0-4a3e-8c4a-d858ca277b3a req-2c3000e3-358c-410f-8346-054643c2336a service nova] Acquiring lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.112775] env[61473]: DEBUG oslo_concurrency.lockutils [req-0325c1f1-7ec0-4a3e-8c4a-d858ca277b3a req-2c3000e3-358c-410f-8346-054643c2336a service nova] Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.112775] env[61473]: DEBUG oslo_concurrency.lockutils [req-0325c1f1-7ec0-4a3e-8c4a-d858ca277b3a req-2c3000e3-358c-410f-8346-054643c2336a service nova] Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.112775] env[61473]: DEBUG nova.compute.manager [req-0325c1f1-7ec0-4a3e-8c4a-d858ca277b3a req-2c3000e3-358c-410f-8346-054643c2336a service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] No waiting events found dispatching network-vif-plugged-a29a964d-9929-4324-81de-e9bd1f83b841 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 689.113234] env[61473]: WARNING nova.compute.manager [req-0325c1f1-7ec0-4a3e-8c4a-d858ca277b3a req-2c3000e3-358c-410f-8346-054643c2336a service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Received unexpected event network-vif-plugged-a29a964d-9929-4324-81de-e9bd1f83b841 for instance with vm_state building and task_state spawning. [ 689.148732] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Successfully created port: d800497c-bb1a-4db1-acc7-9d56f26ef90a {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.241521] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281516, 'name': CreateVM_Task, 'duration_secs': 0.335928} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.241690] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 689.244775] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.244775] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.244775] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 689.244775] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20f68a55-957b-4299-a366-1ee5e0e36285 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.250738] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Waiting for the task: (returnval){ [ 689.250738] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]524815e7-ef18-966b-ddd9-04cf892dfb9b" [ 689.250738] env[61473]: _type = "Task" [ 689.250738] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.259991] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]524815e7-ef18-966b-ddd9-04cf892dfb9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.544541] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Updating instance_info_cache with network_info: [{"id": "99f2bc54-f8a8-410e-a336-6f37bc23af30", "address": "fa:16:3e:59:6a:f3", "network": {"id": "93b700de-fd0f-49e5-b947-d220c9f22a57", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-994282571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e19e7fc95e94ca7afc29a38b935617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99f2bc54-f8", "ovs_interfaceid": "99f2bc54-f8a8-410e-a336-6f37bc23af30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.575444] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Releasing lock "refresh_cache-3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.575760] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Instance network_info: |[{"id": "99f2bc54-f8a8-410e-a336-6f37bc23af30", "address": "fa:16:3e:59:6a:f3", "network": {"id": "93b700de-fd0f-49e5-b947-d220c9f22a57", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-994282571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e19e7fc95e94ca7afc29a38b935617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99f2bc54-f8", "ovs_interfaceid": "99f2bc54-f8a8-410e-a336-6f37bc23af30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 689.576195] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:6a:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99f2bc54-f8a8-410e-a336-6f37bc23af30', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.584905] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating folder: Project (5e19e7fc95e94ca7afc29a38b935617e). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 689.585554] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-307cdcb0-394f-4cce-92ea-4fde568b0634 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.599384] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Created folder: Project (5e19e7fc95e94ca7afc29a38b935617e) in parent group-v843485. [ 689.599580] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating folder: Instances. Parent ref: group-v843510. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 689.599814] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2099f621-2903-4f87-80c0-42c5a0a116ab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.609779] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Created folder: Instances in parent group-v843510. [ 689.610040] env[61473]: DEBUG oslo.service.loopingcall [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.610040] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 689.610213] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5600c920-ebf6-4522-9390-fa818d9e3daa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.642876] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.642876] env[61473]: value = "task-4281519" [ 689.642876] env[61473]: _type = "Task" [ 689.642876] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.653648] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281519, 'name': CreateVM_Task} progress is 6%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.770591] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.770968] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.771185] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.153853] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281519, 'name': CreateVM_Task, 'duration_secs': 0.449424} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.154047] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 690.155024] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.155024] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.155505] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 690.156454] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23272657-c76b-4512-888b-58843ab9ea4b {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.161923] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 690.161923] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52da726e-8be2-7759-123a-49d12cc7c531" [ 690.161923] env[61473]: _type = "Task" [ 690.161923] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.171481] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52da726e-8be2-7759-123a-49d12cc7c531, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.674027] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.674027] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.674027] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.245322] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Successfully updated port: d800497c-bb1a-4db1-acc7-9d56f26ef90a {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 691.262890] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock "refresh_cache-f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.264583] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquired lock "refresh_cache-f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.264583] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e 
tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 691.589963] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.922269] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Updating instance_info_cache with network_info: [{"id": "d800497c-bb1a-4db1-acc7-9d56f26ef90a", "address": "fa:16:3e:04:8b:24", "network": {"id": "db08513f-9b7a-474e-b355-4f1ee357df51", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1320382329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db461a271b874fc8af53d06cfc19dd64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd800497c-bb", "ovs_interfaceid": "d800497c-bb1a-4db1-acc7-9d56f26ef90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.937581] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Releasing lock "refresh_cache-f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.937877] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Instance network_info: |[{"id": "d800497c-bb1a-4db1-acc7-9d56f26ef90a", "address": "fa:16:3e:04:8b:24", "network": {"id": "db08513f-9b7a-474e-b355-4f1ee357df51", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1320382329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db461a271b874fc8af53d06cfc19dd64", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd800497c-bb", "ovs_interfaceid": "d800497c-bb1a-4db1-acc7-9d56f26ef90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 691.938376] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:8b:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd800497c-bb1a-4db1-acc7-9d56f26ef90a', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.947824] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Creating folder: Project (db461a271b874fc8af53d06cfc19dd64). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 691.948348] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23bdf9aa-5ef3-4b92-b1ae-37ecc01f3b06 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.959675] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Created folder: Project (db461a271b874fc8af53d06cfc19dd64) in parent group-v843485. [ 691.960082] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Creating folder: Instances. Parent ref: group-v843513. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 691.960492] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3aae3a9-1c95-499f-9664-bf3a3197eaf6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.972417] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Created folder: Instances in parent group-v843513. [ 691.972662] env[61473]: DEBUG oslo.service.loopingcall [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 691.974223] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 691.974223] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c838d3d-acc1-4d4e-8be4-be338a7de92d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.999470] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.999470] env[61473]: value = "task-4281522" [ 691.999470] env[61473]: _type = "Task" [ 691.999470] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.011470] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281522, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.229794] env[61473]: DEBUG nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Received event network-changed-a29a964d-9929-4324-81de-e9bd1f83b841 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 692.230199] env[61473]: DEBUG nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Refreshing instance network info cache due to event network-changed-a29a964d-9929-4324-81de-e9bd1f83b841. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 692.230462] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Acquiring lock "refresh_cache-921c348d-b2ed-4a9c-b2cf-bdac15ebff67" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.230610] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Acquired lock "refresh_cache-921c348d-b2ed-4a9c-b2cf-bdac15ebff67" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.230774] env[61473]: DEBUG nova.network.neutron [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Refreshing network info cache for port a29a964d-9929-4324-81de-e9bd1f83b841 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 692.512435] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281522, 'name': CreateVM_Task, 'duration_secs': 0.323339} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.512435] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 692.513425] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.513425] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.513425] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 692.513613] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6378339-ede4-4301-862c-075e64515407 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.521760] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Waiting for the task: (returnval){ [ 692.521760] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52395618-4cc3-0099-a82d-537b9c525520" [ 692.521760] env[61473]: _type = "Task" [ 692.521760] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.530928] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52395618-4cc3-0099-a82d-537b9c525520, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.853232] env[61473]: DEBUG nova.network.neutron [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Updated VIF entry in instance network info cache for port a29a964d-9929-4324-81de-e9bd1f83b841. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 692.853513] env[61473]: DEBUG nova.network.neutron [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Updating instance_info_cache with network_info: [{"id": "a29a964d-9929-4324-81de-e9bd1f83b841", "address": "fa:16:3e:20:30:81", "network": {"id": "fe484444-1fc8-493e-bdec-2d06f21c2565", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-166180303-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f7852f1d30c4031b9e98dc36cf1a10c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa29a964d-99", "ovs_interfaceid": "a29a964d-9929-4324-81de-e9bd1f83b841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.867117] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Releasing lock "refresh_cache-921c348d-b2ed-4a9c-b2cf-bdac15ebff67" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.867660] env[61473]: DEBUG nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Received event network-vif-plugged-99f2bc54-f8a8-410e-a336-6f37bc23af30 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 692.868667] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Acquiring lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.868667] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.868667] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.868821] env[61473]: DEBUG 
nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] No waiting events found dispatching network-vif-plugged-99f2bc54-f8a8-410e-a336-6f37bc23af30 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 692.869034] env[61473]: WARNING nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Received unexpected event network-vif-plugged-99f2bc54-f8a8-410e-a336-6f37bc23af30 for instance with vm_state building and task_state spawning. [ 692.869243] env[61473]: DEBUG nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Received event network-changed-99f2bc54-f8a8-410e-a336-6f37bc23af30 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 692.869385] env[61473]: DEBUG nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Refreshing instance network info cache due to event network-changed-99f2bc54-f8a8-410e-a336-6f37bc23af30. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 692.869559] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Acquiring lock "refresh_cache-3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.869700] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Acquired lock "refresh_cache-3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.869854] env[61473]: DEBUG nova.network.neutron [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Refreshing network info cache for port 99f2bc54-f8a8-410e-a336-6f37bc23af30 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 693.036043] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.036043] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.036429] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.808910] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "d9395a72-994b-4baf-a296-2fc3d05a239c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.809261] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "d9395a72-994b-4baf-a296-2fc3d05a239c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.875251] env[61473]: DEBUG nova.network.neutron [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Updated VIF entry in instance network info cache for port 99f2bc54-f8a8-410e-a336-6f37bc23af30. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 693.875251] env[61473]: DEBUG nova.network.neutron [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Updating instance_info_cache with network_info: [{"id": "99f2bc54-f8a8-410e-a336-6f37bc23af30", "address": "fa:16:3e:59:6a:f3", "network": {"id": "93b700de-fd0f-49e5-b947-d220c9f22a57", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-994282571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e19e7fc95e94ca7afc29a38b935617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99f2bc54-f8", "ovs_interfaceid": "99f2bc54-f8a8-410e-a336-6f37bc23af30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.889718] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "e28da414-8fb8-4470-873a-a285925dd988" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.890375] env[61473]: DEBUG 
oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "e28da414-8fb8-4470-873a-a285925dd988" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.893076] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Releasing lock "refresh_cache-3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.893312] env[61473]: DEBUG nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Received event network-vif-plugged-d800497c-bb1a-4db1-acc7-9d56f26ef90a {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 693.893495] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Acquiring lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.893682] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.893839] env[61473]: DEBUG oslo_concurrency.lockutils [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.894006] env[61473]: DEBUG nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] No waiting events found dispatching network-vif-plugged-d800497c-bb1a-4db1-acc7-9d56f26ef90a {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 693.894175] env[61473]: WARNING nova.compute.manager [req-84ed8fee-d415-478c-ba57-1e131f6728bf req-43a9a578-6794-4d58-abc3-976ef95b5658 service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Received unexpected event network-vif-plugged-d800497c-bb1a-4db1-acc7-9d56f26ef90a for instance with vm_state building and task_state spawning. 
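
The "No waiting events found dispatching network-vif-plugged-..." / "Received unexpected event ..." pairs above come from the compute manager's external-event plumbing: a spawning thread registers a waiter for a named event before it expects Neutron's notification, and the RPC handler pops that waiter when the event arrives; an event with no registered waiter is logged as unexpected and dropped. What follows is a minimal sketch of that pattern under those assumptions, not Nova's actual implementation; the names only loosely mirror the pop_instance_event entries in the log.

    import logging
    import threading

    LOG = logging.getLogger(__name__)

    class InstanceEvents:
        """Toy registry of per-instance event waiters (illustration only)."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # The spawning thread registers *before* plugging the VIF, so a
            # fast Neutron notification cannot race past the waiter.
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop(self, instance_uuid, event_name):
            # Called from the external-event handler (the req-... entries).
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def dispatch(events, instance_uuid, event_name):
        waiter = events.pop(instance_uuid, event_name)
        if waiter is None:
            # Matches the WARNING above: the instance is still in
            # vm_state 'building' / task_state 'spawning' and nothing
            # registered for this event, so it is logged and dropped.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
        else:
            waiter.set()
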
[ 695.325846] env[61473]: DEBUG nova.compute.manager [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Received event network-changed-d800497c-bb1a-4db1-acc7-9d56f26ef90a {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 695.326124] env[61473]: DEBUG nova.compute.manager [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Refreshing instance network info cache due to event network-changed-d800497c-bb1a-4db1-acc7-9d56f26ef90a. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 695.326320] env[61473]: DEBUG oslo_concurrency.lockutils [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] Acquiring lock "refresh_cache-f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.326403] env[61473]: DEBUG oslo_concurrency.lockutils [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] Acquired lock "refresh_cache-f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.326558] env[61473]: DEBUG nova.network.neutron [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Refreshing network info cache for port d800497c-bb1a-4db1-acc7-9d56f26ef90a {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 696.107395] env[61473]: DEBUG nova.network.neutron [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Updated VIF entry in instance network info cache for port d800497c-bb1a-4db1-acc7-9d56f26ef90a. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 696.108801] env[61473]: DEBUG nova.network.neutron [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Updating instance_info_cache with network_info: [{"id": "d800497c-bb1a-4db1-acc7-9d56f26ef90a", "address": "fa:16:3e:04:8b:24", "network": {"id": "db08513f-9b7a-474e-b355-4f1ee357df51", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1320382329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db461a271b874fc8af53d06cfc19dd64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd800497c-bb", "ovs_interfaceid": "d800497c-bb1a-4db1-acc7-9d56f26ef90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.124298] env[61473]: DEBUG oslo_concurrency.lockutils [req-41f4e9c9-6ca2-434c-89de-b0878e01b3cc req-5bda2803-7609-4a7d-8fbe-13443c3ba05f service nova] Releasing lock "refresh_cache-f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.360091] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.360849] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.224613] env[61473]: DEBUG oslo_concurrency.lockutils [None req-20e25c58-3ea2-4b25-a762-2db8467f13d9 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] Acquiring lock "0a233986-dc15-431b-bf1b-58bbb14c9965" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.224941] env[61473]: DEBUG oslo_concurrency.lockutils [None req-20e25c58-3ea2-4b25-a762-2db8467f13d9 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] 
Lock "0a233986-dc15-431b-bf1b-58bbb14c9965" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.364060] env[61473]: DEBUG oslo_concurrency.lockutils [None req-441dffd4-24ef-420a-a5dd-bc062b9e0196 tempest-AttachInterfacesUnderV243Test-235916781 tempest-AttachInterfacesUnderV243Test-235916781-project-member] Acquiring lock "ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.365130] env[61473]: DEBUG oslo_concurrency.lockutils [None req-441dffd4-24ef-420a-a5dd-bc062b9e0196 tempest-AttachInterfacesUnderV243Test-235916781 tempest-AttachInterfacesUnderV243Test-235916781-project-member] Lock "ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.687987] env[61473]: DEBUG oslo_concurrency.lockutils [None req-422c6e8c-7a92-4808-a265-994264170469 tempest-ServersWithSpecificFlavorTestJSON-1210934305 tempest-ServersWithSpecificFlavorTestJSON-1210934305-project-member] Acquiring lock "28b6c493-6d01-475d-818a-93540528a3f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.687987] env[61473]: DEBUG oslo_concurrency.lockutils [None req-422c6e8c-7a92-4808-a265-994264170469 tempest-ServersWithSpecificFlavorTestJSON-1210934305 tempest-ServersWithSpecificFlavorTestJSON-1210934305-project-member] Lock "28b6c493-6d01-475d-818a-93540528a3f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.102518] env[61473]: DEBUG oslo_concurrency.lockutils [None req-15f78cc6-f235-401f-a824-1a4abf72c058 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] Acquiring lock "f1938744-dd32-4992-9cf9-53d81491e4a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.102758] env[61473]: DEBUG oslo_concurrency.lockutils [None req-15f78cc6-f235-401f-a824-1a4abf72c058 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] Lock "f1938744-dd32-4992-9cf9-53d81491e4a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.741044] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0748c056-d9a2-43ae-ba7a-b91024690631 tempest-InstanceActionsV221TestJSON-2016883952 tempest-InstanceActionsV221TestJSON-2016883952-project-member] Acquiring lock "e6071ce1-467f-4082-b885-adb4555634d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.741379] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0748c056-d9a2-43ae-ba7a-b91024690631 tempest-InstanceActionsV221TestJSON-2016883952 tempest-InstanceActionsV221TestJSON-2016883952-project-member] Lock "e6071ce1-467f-4082-b885-adb4555634d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.247641] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9b5cf1e7-09b4-4a62-b3e6-cb223110eeab tempest-ImagesOneServerNegativeTestJSON-902804932 tempest-ImagesOneServerNegativeTestJSON-902804932-project-member] Acquiring lock "23c4d824-ec68-42ad-b50d-ee33d8c833a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.247957] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9b5cf1e7-09b4-4a62-b3e6-cb223110eeab tempest-ImagesOneServerNegativeTestJSON-902804932 tempest-ImagesOneServerNegativeTestJSON-902804932-project-member] Lock "23c4d824-ec68-42ad-b50d-ee33d8c833a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.178811] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b81bdafd-ec66-46de-9ef1-97c365d98593 tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] Acquiring lock "f5079713-8e14-41b8-84db-2b599f6e136e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.179079] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b81bdafd-ec66-46de-9ef1-97c365d98593 tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] Lock "f5079713-8e14-41b8-84db-2b599f6e136e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.580109] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a11e4869-78a5-4f81-970e-ea8041232616 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Acquiring lock "72a6972e-a183-4629-ba23-08135882ea29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.580389] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a11e4869-78a5-4f81-970e-ea8041232616 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Lock "72a6972e-a183-4629-ba23-08135882ea29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.005s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.356269] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-639b609c-075e-4de1-9107-97fff389e406 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Acquiring lock "2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.356269] env[61473]: DEBUG oslo_concurrency.lockutils [None req-639b609c-075e-4de1-9107-97fff389e406 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Lock "2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.509480] env[61473]: DEBUG oslo_concurrency.lockutils [None req-87aff8fb-562b-4871-b615-9eb74fd41ee8 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Acquiring lock "762e77db-04ae-474e-8f6e-e6b8c81ecf47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.509794] env[61473]: DEBUG oslo_concurrency.lockutils [None req-87aff8fb-562b-4871-b615-9eb74fd41ee8 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Lock "762e77db-04ae-474e-8f6e-e6b8c81ecf47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.471063] env[61473]: DEBUG oslo_concurrency.lockutils [None req-77c82baa-9e79-405c-bae4-57f971518b0c tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Acquiring lock "9e023463-2573-4518-b6a4-cb1bd3bc0224" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.471063] env[61473]: DEBUG oslo_concurrency.lockutils [None req-77c82baa-9e79-405c-bae4-57f971518b0c tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "9e023463-2573-4518-b6a4-cb1bd3bc0224" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.520436] env[61473]: WARNING oslo_vmware.rw_handles [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 725.520436] env[61473]: ERROR oslo_vmware.rw_handles [ 725.520436] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 725.524032] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 725.524032] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Copying Virtual Disk [datastore2] vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/ce2a0170-1123-4a72-9373-5dfb0ade14f8/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 725.524211] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d152c92-04a7-42cd-af74-53b20034fafa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.536620] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Waiting for the task: (returnval){ [ 725.536620] env[61473]: value = "task-4281523" [ 725.536620] env[61473]: _type = "Task" [ 725.536620] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.546303] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Task: {'id': task-4281523, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.859290] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bc38ebaf-680a-4a61-99f8-c4385638579f tempest-FloatingIPsAssociationNegativeTestJSON-293916846 tempest-FloatingIPsAssociationNegativeTestJSON-293916846-project-member] Acquiring lock "e2c7e712-922a-4fc9-882d-03c425fbdf4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.859621] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bc38ebaf-680a-4a61-99f8-c4385638579f tempest-FloatingIPsAssociationNegativeTestJSON-293916846 tempest-FloatingIPsAssociationNegativeTestJSON-293916846-project-member] Lock "e2c7e712-922a-4fc9-882d-03c425fbdf4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.052027] env[61473]: DEBUG oslo_vmware.exceptions [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 726.052027] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.054203] env[61473]: ERROR nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 726.054203] env[61473]: Faults: ['InvalidArgument'] [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Traceback (most recent call last): [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] yield resources [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self.driver.spawn(context, instance, image_meta, [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self._vmops.spawn(context, instance, image_meta, injected_files, [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 
4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self._fetch_image_if_missing(context, vi) [ 726.054203] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] image_cache(vi, tmp_image_ds_loc) [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] vm_util.copy_virtual_disk( [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] session._wait_for_task(vmdk_copy_task) [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] return self.wait_for_task(task_ref) [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] return evt.wait() [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] result = hub.switch() [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 726.054621] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] return self.greenlet.switch() [ 726.055032] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 726.055032] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self.f(*self.args, **self.kw) [ 726.055032] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 726.055032] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] raise exceptions.translate_fault(task_info.error) [ 726.055032] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 726.055032] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Faults: ['InvalidArgument'] [ 
726.055032] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] [ 726.055032] env[61473]: INFO nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Terminating instance [ 726.056496] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.056703] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.057366] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 726.057577] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 726.057797] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-835bf9ad-d079-4347-8092-a20c9e029870 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.061741] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ea262c-adc6-405e-8830-ec8dd19144a9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.071457] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 726.073805] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac4c8ee6-68ee-4fde-8dc0-d84e483a7b06 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.076115] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.076349] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 726.077408] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef2e4881-ea57-4704-933c-037d2e3dd385 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.083696] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Waiting for the task: (returnval){ [ 726.083696] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]523fffa1-6f0b-fde9-65b2-ebcc995b0a8b" [ 726.083696] env[61473]: _type = "Task" [ 726.083696] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.092431] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]523fffa1-6f0b-fde9-65b2-ebcc995b0a8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.157919] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 726.161807] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 726.161807] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Deleting the datastore file [datastore2] 4703897a-ce8c-4bca-89e9-62fe53c5d404 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 726.161807] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d03ae2d-25f1-4668-a36b-b6dc6b546582 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.170750] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Waiting for the task: (returnval){ [ 726.170750] env[61473]: value = "task-4281525" [ 726.170750] env[61473]: _type = "Task" [ 726.170750] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.184272] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Task: {'id': task-4281525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.598656] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 726.599106] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Creating directory with path [datastore2] vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.599300] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c59e5c04-fc87-460c-a045-437ee0e8f969 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.620880] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Created directory with path [datastore2] vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.621106] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Fetch image to [datastore2] vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 726.621277] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 726.623045] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a226da7-4f7c-4cb3-a730-811bc6c50775 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.632759] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce11536-3751-4a18-8f9c-e2b975e15144 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.643023] 
env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46058a45-7da4-4007-9514-482a22ca75c6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.685508] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8539ec4b-3822-498a-8940-6229ccc4ad8d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.693291] env[61473]: DEBUG oslo_vmware.api [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Task: {'id': task-4281525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070084} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.694990] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 726.695207] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 726.695386] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 726.695788] env[61473]: INFO nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Took 0.64 seconds to destroy the instance on the hypervisor. 
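
The DeleteDatastoreFile_Task entries above follow the standard oslo.vmware invoke-then-poll pattern: a vSphere *_Task method is invoked through the session, and wait_for_task() polls task_info (the "progress is 0%." line) until the task succeeds or faults. A minimal sketch of that pattern in Python, assuming an already-reachable vCenter; the host, credentials, and datastore path below are illustrative assumptions, not values from this log:

    # Sketch only: host, credentials and the datastore path are assumptions.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] vmware_temp/some-dir',  # hypothetical path
        datacenter=vim_util.get_moref('ha-datacenter', 'Datacenter'))
    # wait_for_task() re-reads task_info every task_poll_interval seconds
    # and, if the task faults, raises the exception translated from
    # task_info.error (e.g. the VimFaultException seen later in this log).
    session.wait_for_task(task)
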
[ 726.697712] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7499179f-79ab-419b-9c3b-137b70ce3b63 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.701495] env[61473]: DEBUG nova.compute.claims [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 726.701495] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.701669] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.726754] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 726.807045] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 726.870015] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 726.870177] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 727.257750] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a411c3-e0da-4e97-8a53-b201c483e339 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.267517] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f31b4c1-bf52-4101-acd9-07a0de983914 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.305225] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0fbe57-f24a-4c5b-9e83-395f441c1827 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.313432] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bb583b-b212-4dc7-8dc0-c603b64dd44d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.330593] env[61473]: DEBUG nova.compute.provider_tree [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.343314] env[61473]: DEBUG nova.scheduler.client.report [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.369932] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.668s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.370848] env[61473]: ERROR nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 727.370848] env[61473]: Faults: ['InvalidArgument'] [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Traceback (most recent call last): [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in 
_build_and_run_instance [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self.driver.spawn(context, instance, image_meta, [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self._fetch_image_if_missing(context, vi) [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] image_cache(vi, tmp_image_ds_loc) [ 727.370848] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] vm_util.copy_virtual_disk( [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] session._wait_for_task(vmdk_copy_task) [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] return self.wait_for_task(task_ref) [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] return evt.wait() [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] result = hub.switch() [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] return self.greenlet.switch() [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 727.371256] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] self.f(*self.args, **self.kw) [ 727.371597] env[61473]: ERROR nova.compute.manager [instance: 
4703897a-ce8c-4bca-89e9-62fe53c5d404] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 727.371597] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] raise exceptions.translate_fault(task_info.error) [ 727.371597] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 727.371597] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Faults: ['InvalidArgument'] [ 727.371597] env[61473]: ERROR nova.compute.manager [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] [ 727.372609] env[61473]: DEBUG nova.compute.utils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 727.377471] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Build of instance 4703897a-ce8c-4bca-89e9-62fe53c5d404 was re-scheduled: A specified parameter was not correct: fileType [ 727.377471] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 727.377911] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 727.378572] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 727.378644] env[61473]: DEBUG nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 727.378993] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 727.963554] env[61473]: DEBUG nova.network.neutron [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.985082] env[61473]: INFO nova.compute.manager [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] Took 0.61 seconds to deallocate network for instance. [ 728.154235] env[61473]: INFO nova.scheduler.client.report [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Deleted allocations for instance 4703897a-ce8c-4bca-89e9-62fe53c5d404 [ 728.197804] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c6409f1-d0fc-40b0-b3b0-ce03723d9f2c tempest-ServerDiagnosticsNegativeTest-1094809117 tempest-ServerDiagnosticsNegativeTest-1094809117-project-member] Lock "4703897a-ce8c-4bca-89e9-62fe53c5d404" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.464s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.200925] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "4703897a-ce8c-4bca-89e9-62fe53c5d404" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 46.005s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.200925] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4703897a-ce8c-4bca-89e9-62fe53c5d404] During sync_power_state the instance has a pending task (spawning). Skip. 
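
Each "Acquiring lock … / acquired … waited / released … held" triple in these entries (the release for the lock acquired just above follows immediately) is emitted by oslo.concurrency's lockutils wrapper, which serializes the decorated function on a named in-process lock and logs the wait and hold durations. A minimal sketch of the pattern, with an illustrative lock name and function rather than Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_tracker():
        # Body runs only while the named lock is held; the decorator logs
        # the DEBUG "acquired ... waited" and "released ... held" lines.
        pass

    update_tracker()
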
[ 728.200925] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "4703897a-ce8c-4bca-89e9-62fe53c5d404" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.235389] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 728.345979] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.346372] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.348076] env[61473]: INFO nova.compute.claims [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.957877] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104b205d-e9bb-4bb9-9f54-b2cc262ac08c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.967818] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18a348a-43d4-4bbf-8c78-51c80b54e307 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.015017] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21af733a-5d68-483a-9bbc-955cdb0a0da8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.022483] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9944818c-ed39-4568-ae39-aa7a02c0d688 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.037295] env[61473]: DEBUG nova.compute.provider_tree [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.054292] env[61473]: DEBUG nova.scheduler.client.report [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 
tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.075653] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.728s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.075653] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 729.129377] env[61473]: DEBUG nova.compute.utils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 729.131335] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 729.131682] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 729.224137] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Start building block device mappings for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 729.338970] env[61473]: DEBUG nova.policy [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4e03ca432054866b1defb60a2d1a8d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e19e7fc95e94ca7afc29a38b935617e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 729.433409] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 729.521134] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 729.521134] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 729.521134] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.521292] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 729.521292] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 729.521292] env[61473]: DEBUG nova.virt.hardware [None 
req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 729.521292] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 729.521489] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 729.521694] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 729.521888] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 729.522108] env[61473]: DEBUG nova.virt.hardware [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 729.523095] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedf0775-0184-4158-8469-b9f17419965c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.532943] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7729e2-a2ea-4eff-809b-a5b0532648f6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.542667] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "3a350a34-7728-493f-a737-7a6a3071363e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.543038] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "3a350a34-7728-493f-a737-7a6a3071363e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.598497] 
env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "8f80e386-439c-456e-a4ad-d643de6ae1b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.598733] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "8f80e386-439c-456e-a4ad-d643de6ae1b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.030308] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Successfully created port: fe76f98e-00f7-4ddf-a6eb-2671f3ac2386 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 731.505064] env[61473]: DEBUG oslo_concurrency.lockutils [None req-90e8ba48-ad82-4613-8955-992f31aced7d tempest-ServerActionsTestOtherB-1298047411 tempest-ServerActionsTestOtherB-1298047411-project-member] Acquiring lock "5a356b28-fa9a-4fe6-ab01-e5576d802e8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.505064] env[61473]: DEBUG oslo_concurrency.lockutils [None req-90e8ba48-ad82-4613-8955-992f31aced7d tempest-ServerActionsTestOtherB-1298047411 tempest-ServerActionsTestOtherB-1298047411-project-member] Lock "5a356b28-fa9a-4fe6-ab01-e5576d802e8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.648693] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4da6b0dc-4923-481d-8362-195e9ac40035 tempest-ImagesNegativeTestJSON-255037913 tempest-ImagesNegativeTestJSON-255037913-project-member] Acquiring lock "19550523-59ec-4891-8695-9939b1baefbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.649019] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4da6b0dc-4923-481d-8362-195e9ac40035 tempest-ImagesNegativeTestJSON-255037913 tempest-ImagesNegativeTestJSON-255037913-project-member] Lock "19550523-59ec-4891-8695-9939b1baefbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.396865] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Successfully updated port: fe76f98e-00f7-4ddf-a6eb-2671f3ac2386 {{(pid=61473) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 733.439708] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "refresh_cache-c6880758-25cf-4078-9455-827db6fb6435" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.440781] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired lock "refresh_cache-c6880758-25cf-4078-9455-827db6fb6435" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.440781] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 733.524568] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 734.018715] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Updating instance_info_cache with network_info: [{"id": "fe76f98e-00f7-4ddf-a6eb-2671f3ac2386", "address": "fa:16:3e:8e:5f:6a", "network": {"id": "93b700de-fd0f-49e5-b947-d220c9f22a57", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-994282571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e19e7fc95e94ca7afc29a38b935617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe76f98e-00", "ovs_interfaceid": "fe76f98e-00f7-4ddf-a6eb-2671f3ac2386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.075535] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Releasing lock "refresh_cache-c6880758-25cf-4078-9455-827db6fb6435" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.075535] env[61473]: DEBUG nova.compute.manager [None 
req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Instance network_info: |[{"id": "fe76f98e-00f7-4ddf-a6eb-2671f3ac2386", "address": "fa:16:3e:8e:5f:6a", "network": {"id": "93b700de-fd0f-49e5-b947-d220c9f22a57", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-994282571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e19e7fc95e94ca7afc29a38b935617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe76f98e-00", "ovs_interfaceid": "fe76f98e-00f7-4ddf-a6eb-2671f3ac2386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 734.075776] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:5f:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe76f98e-00f7-4ddf-a6eb-2671f3ac2386', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.086543] env[61473]: DEBUG oslo.service.loopingcall [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.088058] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6880758-25cf-4078-9455-827db6fb6435] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 734.088332] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5249e4ff-b8b2-4429-8160-cc14eb53b634 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.114858] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.114858] env[61473]: value = "task-4281526" [ 734.114858] env[61473]: _type = "Task" [ 734.114858] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.126647] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281526, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.327445] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e79e260-7d31-4026-a6c2-ab25cdd92c62 tempest-AttachInterfacesV270Test-652163780 tempest-AttachInterfacesV270Test-652163780-project-member] Acquiring lock "0091006b-084b-40e7-8d60-a2b43acc08a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.327695] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e79e260-7d31-4026-a6c2-ab25cdd92c62 tempest-AttachInterfacesV270Test-652163780 tempest-AttachInterfacesV270Test-652163780-project-member] Lock "0091006b-084b-40e7-8d60-a2b43acc08a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.636524] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281526, 'name': CreateVM_Task, 'duration_secs': 0.35883} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.636524] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6880758-25cf-4078-9455-827db6fb6435] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 734.636524] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.636524] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.636524] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 734.636722] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7349298e-1663-4288-8453-59281a44f1e4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.640516] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 734.640516] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52857493-7f6f-f264-783f-48768fe1d185" [ 734.640516] env[61473]: _type = "Task" [ 734.640516] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.650583] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52857493-7f6f-f264-783f-48768fe1d185, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.154164] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.156139] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.156139] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.961105] env[61473]: DEBUG nova.compute.manager [req-cb2f0098-6517-4abf-bbd6-ca597f0e0cc9 req-9fd6be97-5559-4135-ad2f-ccc086e5a206 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] Received event network-vif-plugged-fe76f98e-00f7-4ddf-a6eb-2671f3ac2386 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 735.961414] env[61473]: DEBUG oslo_concurrency.lockutils [req-cb2f0098-6517-4abf-bbd6-ca597f0e0cc9 req-9fd6be97-5559-4135-ad2f-ccc086e5a206 service nova] Acquiring lock "c6880758-25cf-4078-9455-827db6fb6435-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.961533] env[61473]: DEBUG oslo_concurrency.lockutils [req-cb2f0098-6517-4abf-bbd6-ca597f0e0cc9 req-9fd6be97-5559-4135-ad2f-ccc086e5a206 service nova] Lock "c6880758-25cf-4078-9455-827db6fb6435-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.961698] env[61473]: DEBUG oslo_concurrency.lockutils [req-cb2f0098-6517-4abf-bbd6-ca597f0e0cc9 req-9fd6be97-5559-4135-ad2f-ccc086e5a206 service nova] Lock "c6880758-25cf-4078-9455-827db6fb6435-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.962358] env[61473]: DEBUG nova.compute.manager [req-cb2f0098-6517-4abf-bbd6-ca597f0e0cc9 req-9fd6be97-5559-4135-ad2f-ccc086e5a206 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] No waiting events 
found dispatching network-vif-plugged-fe76f98e-00f7-4ddf-a6eb-2671f3ac2386 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 735.962589] env[61473]: WARNING nova.compute.manager [req-cb2f0098-6517-4abf-bbd6-ca597f0e0cc9 req-9fd6be97-5559-4135-ad2f-ccc086e5a206 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] Received unexpected event network-vif-plugged-fe76f98e-00f7-4ddf-a6eb-2671f3ac2386 for instance with vm_state building and task_state spawning. [ 736.025383] env[61473]: DEBUG oslo_concurrency.lockutils [None req-82d9a436-76e8-4b8d-a827-886baff3acfd tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] Acquiring lock "8ac20624-5031-495d-bdf0-a5a7938539a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.025932] env[61473]: DEBUG oslo_concurrency.lockutils [None req-82d9a436-76e8-4b8d-a827-886baff3acfd tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] Lock "8ac20624-5031-495d-bdf0-a5a7938539a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.255044] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b94502f-f249-4d32-b625-ca2bfb0b8053 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "50977f71-35b5-46e5-8096-5725c8053295" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.255459] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b94502f-f249-4d32-b625-ca2bfb0b8053 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "50977f71-35b5-46e5-8096-5725c8053295" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.450946] env[61473]: DEBUG nova.compute.manager [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] Received event network-changed-fe76f98e-00f7-4ddf-a6eb-2671f3ac2386 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 739.451186] env[61473]: DEBUG nova.compute.manager [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] Refreshing instance network info cache due to event network-changed-fe76f98e-00f7-4ddf-a6eb-2671f3ac2386. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 739.454438] env[61473]: DEBUG oslo_concurrency.lockutils [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] Acquiring lock "refresh_cache-c6880758-25cf-4078-9455-827db6fb6435" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.454438] env[61473]: DEBUG oslo_concurrency.lockutils [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] Acquired lock "refresh_cache-c6880758-25cf-4078-9455-827db6fb6435" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.454438] env[61473]: DEBUG nova.network.neutron [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] Refreshing network info cache for port fe76f98e-00f7-4ddf-a6eb-2671f3ac2386 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 739.857994] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f4ffbba0-f332-429b-b545-4216c2cadc85 tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] Acquiring lock "b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.858169] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f4ffbba0-f332-429b-b545-4216c2cadc85 tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] Lock "b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.040467] env[61473]: DEBUG nova.network.neutron [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] Updated VIF entry in instance network info cache for port fe76f98e-00f7-4ddf-a6eb-2671f3ac2386. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 740.040467] env[61473]: DEBUG nova.network.neutron [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] [instance: c6880758-25cf-4078-9455-827db6fb6435] Updating instance_info_cache with network_info: [{"id": "fe76f98e-00f7-4ddf-a6eb-2671f3ac2386", "address": "fa:16:3e:8e:5f:6a", "network": {"id": "93b700de-fd0f-49e5-b947-d220c9f22a57", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-994282571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e19e7fc95e94ca7afc29a38b935617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe76f98e-00", "ovs_interfaceid": "fe76f98e-00f7-4ddf-a6eb-2671f3ac2386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.051755] env[61473]: DEBUG oslo_concurrency.lockutils [req-87bd41b2-9fc6-436d-b419-5ff44dd77d96 req-0ab76ee8-98e0-4e18-aeaf-693924214b57 service nova] Releasing lock "refresh_cache-c6880758-25cf-4078-9455-827db6fb6435" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.588360] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.625340] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.625340] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.966244] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.966244] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.966244] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] 
Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.966244] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 742.962538] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.967589] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.967589] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 742.967589] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 743.000987] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.000987] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.000987] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.000987] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.000987] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.001265] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Skipping network cache update for instance because it is Building. 
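[Editor's note] The burst of oslo_service.periodic_task records above, and the "CONF.reclaim_instance_interval <= 0, skipping" line, follow the standard oslo.service pattern: a PeriodicTasks subclass whose decorated methods run_periodic_tasks() invokes on schedule, with each task guarding on its own config option. A minimal sketch under that assumption (option registration simplified; not Nova's actual manager):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt("reclaim_instance_interval", default=0)])

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=10)
        def _reclaim_queued_deletes(self, context):
            # Mirrors the guard in the log: a non-positive interval
            # turns the task into a no-op.
            if CONF.reclaim_instance_interval <= 0:
                return

    # Normally driven by a timer; each call runs whichever tasks are due
    # and emits the "Running periodic task ..." DEBUG lines seen above.
    Manager().run_periodic_tasks(context=None)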
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.001265] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.001265] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.001265] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.001265] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 743.001428] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 743.001428] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.001428] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 743.018564] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.018564] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.018564] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.018564] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
743.021222] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d880e94d-977e-4a23-9398-543bcc77dda0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.029581] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ab6f0d-cca4-4797-a919-ee7221ea3314 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.044351] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3660fb67-69e9-4ce1-9939-3fe8bac16e7a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.051613] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67c6c68-4f3a-4cb2-a039-8a7fb12e94a3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.085481] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180629MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 743.085651] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.085855] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.185544] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5a1247fd-0053-444a-bb93-2ff419d9e102 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.185716] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f0aae1f8-14cb-4fd5-900b-1b062c7f6783 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.185889] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 28261a41-7f6d-495c-abbd-7f73f67e80d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.185961] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.186128] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5d67907c-7199-4734-a5cc-4466703eaa51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.186293] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dc8b5106-5657-409b-b425-b929c8e893d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.186422] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.186541] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.186654] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.186766] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 743.213439] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.239121] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.254199] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.269332] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0a233986-dc15-431b-bf1b-58bbb14c9965 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.282738] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.314646] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 28b6c493-6d01-475d-818a-93540528a3f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.330096] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f1938744-dd32-4992-9cf9-53d81491e4a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.344398] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e6071ce1-467f-4082-b885-adb4555634d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.357241] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 23c4d824-ec68-42ad-b50d-ee33d8c833a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.372404] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f5079713-8e14-41b8-84db-2b599f6e136e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.396783] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 72a6972e-a183-4629-ba23-08135882ea29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.409243] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.422111] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 762e77db-04ae-474e-8f6e-e6b8c81ecf47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.447834] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9e023463-2573-4518-b6a4-cb1bd3bc0224 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.464681] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e2c7e712-922a-4fc9-882d-03c425fbdf4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.493935] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.512479] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8f80e386-439c-456e-a4ad-d643de6ae1b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.534481] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5a356b28-fa9a-4fe6-ab01-e5576d802e8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.548074] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19550523-59ec-4891-8695-9939b1baefbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.566397] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0091006b-084b-40e7-8d60-a2b43acc08a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.581777] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8ac20624-5031-495d-bdf0-a5a7938539a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.601051] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 50977f71-35b5-46e5-8096-5725c8053295 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
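[Editor's note] The per-instance records above are what produce the totals reported a few records below (used_ram=1792MB, used_vcpus=10, used_disk=10GB): ten actively managed instances at 128 MB / 1 VCPU / 1 GB each, plus the 512 MB the inventory reserves; instances that are only scheduled are skipped. A quick check of that arithmetic, together with placement's capacity rule, capacity = (total - reserved) * allocation_ratio, applied to the inventory data logged here:

    # Values copied from the surrounding records.
    RESERVED_MB = 512
    active = 10 * [{"MEMORY_MB": 128, "VCPU": 1, "DISK_GB": 1}]

    used_ram = RESERVED_MB + sum(a["MEMORY_MB"] for a in active)   # 1792
    used_vcpus = sum(a["VCPU"] for a in active)                    # 10
    used_disk = sum(a["DISK_GB"] for a in active)                  # 10
    assert (used_ram, used_vcpus, used_disk) == (1792, 10, 10)

    # Effective capacity per resource class, from the logged inventory.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 329, "reserved": 0, "allocation_ratio": 1.0},
    }
    capacity = {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inventory.items()}
    print(capacity)  # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 329.0}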
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.630558] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.630820] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 743.630970] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 744.168487] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410bd7e1-781b-4903-86d6-77b9e801e9e9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.175970] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ae2e33-b49c-4e76-ad60-7a04c8647745 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.208298] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a625d4e-f326-4679-9b93-29d8f24f3d24 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.215905] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78ef242-49a1-47c5-a83f-406dfb54b911 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.231807] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.241045] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 744.262537] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 744.262749] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.177s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.900935] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23d1f0b-e8ba-47dd-a230-a10bf782c727 tempest-VolumesAssistedSnapshotsTest-93010175 tempest-VolumesAssistedSnapshotsTest-93010175-project-member] Acquiring lock "d2946031-980f-4ee9-8818-f4d7584d4e3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.901389] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23d1f0b-e8ba-47dd-a230-a10bf782c727 tempest-VolumesAssistedSnapshotsTest-93010175 tempest-VolumesAssistedSnapshotsTest-93010175-project-member] Lock "d2946031-980f-4ee9-8818-f4d7584d4e3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.169027] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ec83e-5491-4cdb-be90-186c6affd0da tempest-ServersTestJSON-302007319 tempest-ServersTestJSON-302007319-project-member] Acquiring lock "b7aecc9b-4032-4e15-963e-6cc270af55f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.169300] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ec83e-5491-4cdb-be90-186c6affd0da tempest-ServersTestJSON-302007319 tempest-ServersTestJSON-302007319-project-member] Lock "b7aecc9b-4032-4e15-963e-6cc270af55f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.600441] env[61473]: DEBUG oslo_concurrency.lockutils [None req-61dded22-b485-46ab-943d-412d8f73c280 tempest-ServersNegativeTestMultiTenantJSON-1072421860 tempest-ServersNegativeTestMultiTenantJSON-1072421860-project-member] Acquiring lock "77271f0d-6c43-4ecc-9211-e16c977b8531" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.600790] env[61473]: DEBUG oslo_concurrency.lockutils [None req-61dded22-b485-46ab-943d-412d8f73c280 tempest-ServersNegativeTestMultiTenantJSON-1072421860 tempest-ServersNegativeTestMultiTenantJSON-1072421860-project-member] Lock "77271f0d-6c43-4ecc-9211-e16c977b8531" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.158695] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4aa50229-b6a1-4807-8804-c8ac7cb132c8 tempest-ServerRescueTestJSONUnderV235-876916624 tempest-ServerRescueTestJSONUnderV235-876916624-project-member] Acquiring lock 
"f081b1bc-1d1f-4b5c-8690-e0186c1a7793" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.158695] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4aa50229-b6a1-4807-8804-c8ac7cb132c8 tempest-ServerRescueTestJSONUnderV235-876916624 tempest-ServerRescueTestJSONUnderV235-876916624-project-member] Lock "f081b1bc-1d1f-4b5c-8690-e0186c1a7793" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.538842] env[61473]: WARNING oslo_vmware.rw_handles [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 775.538842] env[61473]: ERROR oslo_vmware.rw_handles [ 775.539510] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 775.540755] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 775.541082] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Copying Virtual Disk [datastore2] vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] 
vmware_temp/9aa2df78-04c2-4609-8e27-00c6d74b5e71/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 775.542034] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e536cd28-2218-4418-84b1-e7608a544c97 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.551236] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Waiting for the task: (returnval){ [ 775.551236] env[61473]: value = "task-4281527" [ 775.551236] env[61473]: _type = "Task" [ 775.551236] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.559134] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Task: {'id': task-4281527, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.063092] env[61473]: DEBUG oslo_vmware.exceptions [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 776.063092] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.063360] env[61473]: ERROR nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 776.063360] env[61473]: Faults: ['InvalidArgument'] [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Traceback (most recent call last): [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] yield resources [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self.driver.spawn(context, instance, image_meta, [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 776.063360] env[61473]: ERROR 
nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self._fetch_image_if_missing(context, vi) [ 776.063360] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] image_cache(vi, tmp_image_ds_loc) [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] vm_util.copy_virtual_disk( [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] session._wait_for_task(vmdk_copy_task) [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] return self.wait_for_task(task_ref) [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] return evt.wait() [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] result = hub.switch() [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 776.063667] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] return self.greenlet.switch() [ 776.063923] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 776.063923] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self.f(*self.args, **self.kw) [ 776.063923] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 776.063923] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] raise exceptions.translate_fault(task_info.error) [ 776.063923] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 776.063923] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Faults: ['InvalidArgument'] [ 776.063923] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] [ 776.063923] env[61473]: INFO nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Terminating instance [ 776.065242] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.065446] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.066091] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 776.066283] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 776.066506] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-324adcad-a4c5-4bf4-80f2-4961d4c365a5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.068867] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5967ebbe-ef89-4aaf-bae6-533a93e0cfe9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.076023] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 776.076023] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41a2e9e2-d796-4403-be52-a8b1c1026eaa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.077915] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Created directory with path [datastore2] devstack-image-cache_base 
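[Editor's note] The "Creating directory ... / Created directory ..." pair around the MakeDirectory call reflects an idempotent create-if-missing step: the driver issues the call and treats an already-existing folder as success, which is why devstack-image-cache_base can be "created" repeatedly across requests. A local-filesystem analogue of that pattern (illustrative, not the driver's code):

    import os

    def ensure_dir(path):
        # Create-if-missing: an existing directory is not an error,
        # matching the tolerant MakeDirectory usage seen in the log.
        os.makedirs(path, exist_ok=True)

    ensure_dir("/tmp/devstack-image-cache_base")  # safe to call twice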
{{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.078092] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 776.079028] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c41eac9-860d-42c4-9875-1a1499ae9d91 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.083471] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Waiting for the task: (returnval){ [ 776.083471] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]520a85cf-2744-50fd-56d1-359186235dd0" [ 776.083471] env[61473]: _type = "Task" [ 776.083471] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.090659] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]520a85cf-2744-50fd-56d1-359186235dd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.142418] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 776.142746] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 776.143074] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Deleting the datastore file [datastore2] f0aae1f8-14cb-4fd5-900b-1b062c7f6783 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 776.143376] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0e753f4-3e79-4e64-bf0f-e8919abcd070 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.150170] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Waiting for the task: (returnval){ [ 776.150170] env[61473]: value = "task-4281529" [ 776.150170] env[61473]: _type = "Task" [ 776.150170] env[61473]: } to complete. 
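[Editor's note] The "Waiting for the task ... to complete" / "progress is 0%" pairs here come from a poll loop: task info is re-read on an interval until the task succeeds, and a failed task is raised as a translated fault (the VimFaultException tracebacks in this log are exactly that raise). A generic sketch of the pattern, not oslo.vmware's actual implementation:

    import time

    class TaskFailed(Exception):
        """Stand-in for a translated VIM fault (cf. VimFaultException)."""

    def wait_for_task(poll, interval=0.5, timeout=60.0):
        # `poll` returns e.g. {'state': 'running', 'progress': 0} and
        # eventually {'state': 'success', 'result': ...} or
        # {'state': 'error', 'error': ...}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise TaskFailed(info.get("error"))
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")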
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.158238] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Task: {'id': task-4281529, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.595027] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 776.595027] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Creating directory with path [datastore2] vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.595027] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3a6e841-1fd4-48f7-86a3-a82af406025f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.605903] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Created directory with path [datastore2] vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.606125] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Fetch image to [datastore2] vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 776.606299] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 776.607076] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ceb37e-f353-426d-bef6-882c8cf4ac2a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.614227] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f0e367-05e9-402d-8e1c-7412de3e88fe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.623460] env[61473]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4525c01c-0b38-4972-8e92-c1efe6843832 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.656862] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd37e932-658d-4a21-85f5-9d28f0839678 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.664981] env[61473]: DEBUG oslo_vmware.api [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Task: {'id': task-4281529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069591} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.666415] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.666612] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 776.666824] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 776.667015] env[61473]: INFO nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Took 0.60 seconds to destroy the instance on the hypervisor. 
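[Editor's note] The compute_resources lock lines that follow (Acquiring ... by "...abort_instance_claim", acquired ... waited, "released" ... held) are emitted by oslo.concurrency around decorated resource-tracker methods. A minimal sketch of that locking pattern, assuming lockutils.synchronized over a shared semaphore name; not Nova's actual decorator stack:

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

    class ResourceTracker:
        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def abort_instance_claim(self, context, instance):
            # Every mutation of tracked resources runs under the same
            # in-process lock; lockutils logs the acquire / waited /
            # released / held lines seen in the surrounding records.
            pass

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def _update_available_resource(self, context):
            pass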
[ 776.668787] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ed9b569b-8815-4289-9424-8111a8d745a0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.670737] env[61473]: DEBUG nova.compute.claims [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 776.670917] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.671149] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.700307] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 776.764881] env[61473]: DEBUG oslo_vmware.rw_handles [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 776.823967] env[61473]: DEBUG oslo_vmware.rw_handles [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 776.824098] env[61473]: DEBUG oslo_vmware.rw_handles [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
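[Editor's note] The earlier WARNING traceback from oslo_vmware.rw_handles is stock stdlib behaviour: http.client raises RemoteDisconnected whenever getresponse() finds the peer closed the connection before sending a status line. A self-contained reproduction, independent of oslo.vmware:

    import http.client
    import socket
    import threading

    def close_without_reply(server):
        conn, _ = server.accept()
        conn.recv(1024)   # read (part of) the request ...
        conn.close()      # ... then hang up without any response

    server = socket.socket()
    server.bind(("127.0.0.1", 0))
    server.listen(1)
    threading.Thread(target=close_without_reply, args=(server,), daemon=True).start()

    conn = http.client.HTTPConnection("127.0.0.1", server.getsockname()[1])
    conn.request("GET", "/")
    try:
        conn.getresponse()
    except http.client.RemoteDisconnected as exc:
        print("caught:", exc)   # Remote end closed connection without response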
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 777.202562] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15de7dbd-8049-4e2a-9935-d4efa851af60 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.210418] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f051fab0-03b5-4bb7-8e3f-40290dc21564 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.239280] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd1104b-9d96-4dea-a847-61488684d90a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.246080] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b7f042-0c4b-4845-8833-d074e7985239 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.258579] env[61473]: DEBUG nova.compute.provider_tree [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.267795] env[61473]: DEBUG nova.scheduler.client.report [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 777.282931] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.612s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.283489] env[61473]: ERROR nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 777.283489] env[61473]: Faults: ['InvalidArgument'] [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Traceback (most recent call last): [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 
777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self.driver.spawn(context, instance, image_meta, [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self._vmops.spawn(context, instance, image_meta, injected_files, [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self._fetch_image_if_missing(context, vi) [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] image_cache(vi, tmp_image_ds_loc) [ 777.283489] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] vm_util.copy_virtual_disk( [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] session._wait_for_task(vmdk_copy_task) [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] return self.wait_for_task(task_ref) [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] return evt.wait() [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] result = hub.switch() [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] return self.greenlet.switch() [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 777.283804] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] self.f(*self.args, **self.kw) [ 777.284099] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 777.284099] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] raise exceptions.translate_fault(task_info.error) [ 777.284099] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 777.284099] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Faults: ['InvalidArgument'] [ 777.284099] env[61473]: ERROR nova.compute.manager [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] [ 777.284363] env[61473]: DEBUG nova.compute.utils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 777.285975] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Build of instance f0aae1f8-14cb-4fd5-900b-1b062c7f6783 was re-scheduled: A specified parameter was not correct: fileType [ 777.285975] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 777.286506] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 777.286632] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 777.286885] env[61473]: DEBUG nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 777.287198] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 777.648079] env[61473]: DEBUG nova.network.neutron [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.667465] env[61473]: INFO nova.compute.manager [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] Took 0.38 seconds to deallocate network for instance. [ 777.790957] env[61473]: INFO nova.scheduler.client.report [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Deleted allocations for instance f0aae1f8-14cb-4fd5-900b-1b062c7f6783 [ 777.814057] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e5ed95b-12ad-4b21-9e4a-e15c0dab00f2 tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.975s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.815531] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 95.620s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.815887] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f0aae1f8-14cb-4fd5-900b-1b062c7f6783] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 777.816150] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "f0aae1f8-14cb-4fd5-900b-1b062c7f6783" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.825194] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 777.877639] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.877901] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.879387] env[61473]: INFO nova.compute.claims [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.358585] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc7143d-7d78-4533-96c3-cf59acc7476a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.366430] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a308550b-ed5a-4513-86aa-dfdb3a927a76 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.395923] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03dce13-f524-4b4b-9d1a-b4f44d1f72cb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.402898] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b594a964-df06-4a48-8623-e1e825513098 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.416605] env[61473]: DEBUG nova.compute.provider_tree [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.424594] env[61473]: DEBUG nova.scheduler.client.report [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 
tempest-ServersAdmin275Test-1695820350-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.440785] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.563s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.441299] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 778.479403] env[61473]: DEBUG nova.compute.utils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 778.480667] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Not allocating networking since 'none' was specified. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 778.490860] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 778.579379] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 778.609025] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 778.609222] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 778.609382] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.609564] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 778.609712] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.609859] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 778.610140] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 778.610352] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 778.610581] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff 
tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 778.610791] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 778.611027] env[61473]: DEBUG nova.virt.hardware [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 778.611940] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf6b50d-c847-40de-ad9a-20d4eb7155d5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.620212] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb021a99-83ee-470d-acec-e2b4e04a98da {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.634014] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance VIF info [] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 778.639709] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Creating folder: Project (33c5896f105c4d9ca6e5cd367900945a). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 778.640049] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48472e25-3d47-4e9b-97c7-41f7627b7161 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.649811] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Created folder: Project (33c5896f105c4d9ca6e5cd367900945a) in parent group-v843485. [ 778.650070] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Creating folder: Instances. Parent ref: group-v843517. 
{{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 778.650224] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6ca24d1-4dfa-4896-bfd7-20edd7e9113c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.658845] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Created folder: Instances in parent group-v843517. [ 778.659089] env[61473]: DEBUG oslo.service.loopingcall [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.659274] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 778.659461] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1b4feb4-6cf0-41f9-a48f-9e327fc22d87 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.674985] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 778.674985] env[61473]: value = "task-4281532" [ 778.674985] env[61473]: _type = "Task" [ 778.674985] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.682045] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281532, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.187330] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281532, 'name': CreateVM_Task, 'duration_secs': 0.263799} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.187505] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 779.187976] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.188168] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.188472] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 779.188725] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec042346-6ecf-44f3-b37d-689fa0849915 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.196497] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Waiting for the task: (returnval){ [ 779.196497] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52cac87b-0b78-6f0c-3ea4-db7ecf92c7dc" [ 779.196497] env[61473]: _type = "Task" [ 779.196497] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.207294] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52cac87b-0b78-6f0c-3ea4-db7ecf92c7dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.320922] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e1677eab-e1da-4b7f-9b3b-48dab26b689f tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Acquiring lock "dd63d5b4-50a7-4dce-8e2f-3eac7d55e424" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.321174] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e1677eab-e1da-4b7f-9b3b-48dab26b689f tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "dd63d5b4-50a7-4dce-8e2f-3eac7d55e424" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.707173] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.707437] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.710806] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.229118] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.229118] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 803.229118] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 803.249350] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.249505] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.249639] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.249767] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.249890] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.250022] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.250149] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.250268] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.250388] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.250504] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 803.250623] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 803.251107] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.251286] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.251461] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.251616] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.251755] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.251915] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.252031] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 803.252176] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.265705] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.265919] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.266098] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.266303] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 803.267520] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8d13e1-8fa4-46d2-a286-cd8ad3d50f7e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.276233] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4052ffeb-fda9-4b15-bfed-ba4b81f5019c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.290187] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168871fd-32b7-45be-a361-a1adb7359b51 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.296295] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e61bec-3667-43d4-875c-85f1adbe1eda {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.324567] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180623MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 803.324672] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.324859] 
env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.396090] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5a1247fd-0053-444a-bb93-2ff419d9e102 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.396376] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 28261a41-7f6d-495c-abbd-7f73f67e80d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.396562] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.396718] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5d67907c-7199-4734-a5cc-4466703eaa51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.396868] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dc8b5106-5657-409b-b425-b929c8e893d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.397022] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.397207] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.397477] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.397647] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.397808] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 803.410578] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.422105] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.432879] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0a233986-dc15-431b-bf1b-58bbb14c9965 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.442362] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.452083] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 28b6c493-6d01-475d-818a-93540528a3f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.461517] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f1938744-dd32-4992-9cf9-53d81491e4a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.472490] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e6071ce1-467f-4082-b885-adb4555634d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.482686] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 23c4d824-ec68-42ad-b50d-ee33d8c833a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.492196] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f5079713-8e14-41b8-84db-2b599f6e136e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.503027] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 72a6972e-a183-4629-ba23-08135882ea29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.513043] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.523243] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 762e77db-04ae-474e-8f6e-e6b8c81ecf47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.532194] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9e023463-2573-4518-b6a4-cb1bd3bc0224 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.545269] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e2c7e712-922a-4fc9-882d-03c425fbdf4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.556753] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.566409] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8f80e386-439c-456e-a4ad-d643de6ae1b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.575985] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5a356b28-fa9a-4fe6-ab01-e5576d802e8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.590229] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19550523-59ec-4891-8695-9939b1baefbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.601602] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0091006b-084b-40e7-8d60-a2b43acc08a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.619102] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8ac20624-5031-495d-bdf0-a5a7938539a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.628134] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 50977f71-35b5-46e5-8096-5725c8053295 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.640145] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.650330] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d2946031-980f-4ee9-8818-f4d7584d4e3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.661366] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b7aecc9b-4032-4e15-963e-6cc270af55f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.671767] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 77271f0d-6c43-4ecc-9211-e16c977b8531 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.687147] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f081b1bc-1d1f-4b5c-8690-e0186c1a7793 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.698353] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dd63d5b4-50a7-4dce-8e2f-3eac7d55e424 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.698777] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 803.698930] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 804.134779] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b902b7b4-3c52-4d95-bc8b-2f6b254309e9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.142602] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b8ae94-3a86-4bed-af54-4df50c5e2ef0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.172369] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e60df16-1975-46c5-a02b-cef5d08a21cb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.179700] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ddaab6-9474-42ca-a3ad-7a7dfb30f737 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.192871] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.201972] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.216849] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 804.217032] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.892s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.950298] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 826.518715] env[61473]: WARNING oslo_vmware.rw_handles [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 826.518715] env[61473]: ERROR oslo_vmware.rw_handles [ 826.519463] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 826.520791] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 826.521063] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Copying Virtual Disk [datastore2] vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] 
vmware_temp/69ad6af0-702e-4a94-9e7e-ffd4031d9372/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 826.521352] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad14b4d2-4c06-401b-8dbc-4f52918b72f5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.530224] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Waiting for the task: (returnval){ [ 826.530224] env[61473]: value = "task-4281533" [ 826.530224] env[61473]: _type = "Task" [ 826.530224] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.538198] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Task: {'id': task-4281533, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.041880] env[61473]: DEBUG oslo_vmware.exceptions [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 827.042367] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.042753] env[61473]: ERROR nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 827.042753] env[61473]: Faults: ['InvalidArgument'] [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Traceback (most recent call last): [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] yield resources [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] self.driver.spawn(context, instance, image_meta, [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 
5a1247fd-0053-444a-bb93-2ff419d9e102] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] self._fetch_image_if_missing(context, vi) [ 827.042753] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] image_cache(vi, tmp_image_ds_loc) [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] vm_util.copy_virtual_disk( [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] session._wait_for_task(vmdk_copy_task) [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] return self.wait_for_task(task_ref) [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] return evt.wait() [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] result = hub.switch() [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 827.043125] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] return self.greenlet.switch() [ 827.043458] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 827.043458] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] self.f(*self.args, **self.kw) [ 827.043458] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 827.043458] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] raise exceptions.translate_fault(task_info.error) [ 827.043458] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] oslo_vmware.exceptions.VimFaultException: A specified 
parameter was not correct: fileType [ 827.043458] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Faults: ['InvalidArgument'] [ 827.043458] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] [ 827.043458] env[61473]: INFO nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Terminating instance [ 827.044595] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.044811] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.045062] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3909bb8-c5d6-4b68-959f-b69ec75e0639 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.047302] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 827.047490] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 827.048504] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf85fe2-eb4e-4dff-840b-dc6f1ea1596f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.055453] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 827.055571] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b797af5a-3264-4e72-9326-45044374ab32 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.057808] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.058011] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 827.059034] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f330e4ac-42d3-45db-a97c-8fafd4dddfd9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.063649] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Waiting for the task: (returnval){ [ 827.063649] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52924a4c-a590-3cb8-b425-b9cf24044fc6" [ 827.063649] env[61473]: _type = "Task" [ 827.063649] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.072793] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52924a4c-a590-3cb8-b425-b9cf24044fc6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.137610] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 827.137840] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 827.138134] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Deleting the datastore file [datastore2] 5a1247fd-0053-444a-bb93-2ff419d9e102 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.138448] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7f145e4-463e-4481-a769-5db5f5a64cfa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.146053] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Waiting for the task: (returnval){ [ 827.146053] env[61473]: value = "task-4281535" [ 827.146053] env[61473]: _type = "Task" [ 827.146053] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.154247] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Task: {'id': task-4281535, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.573881] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 827.574194] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Creating directory with path [datastore2] vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.574397] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a3420ef-93a5-4612-9d46-2c014a3ca652 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.593267] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Created directory with path [datastore2] vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.593462] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Fetch image to [datastore2] vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 827.593631] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 827.594395] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a609eeb4-7ba9-4e4c-b9e1-0bf6aff77328 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.602282] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffcc5d1-e1a1-4bbb-a3ff-65962ff3c6b9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.611273] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4f58eb-1c51-4458-a4d6-0b301b6fa263 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.640918] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-65effb76-f8d3-4ff1-9dc0-4039279f2d2a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.649081] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75d396bc-68f5-42db-bfa7-c92e2de58d31 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.654867] env[61473]: DEBUG oslo_vmware.api [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Task: {'id': task-4281535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07055} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.655116] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 827.655301] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 827.655469] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 827.655639] env[61473]: INFO nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Took 0.61 seconds to destroy the instance on the hypervisor. 
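[editor's note] The CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above all follow the same oslo.vmware pattern: submit a vSphere task, then poll it ("Waiting for the task ... to complete", "progress is 0%") until it either finishes ("completed successfully" with duration_secs) or its fault is raised, as happened earlier with InvalidArgument. A minimal sketch of that polling loop follows; get_task_info and TaskFailed are hypothetical stand-ins for the PropertyCollector read and fault translation that oslo.vmware actually performs, not its real API.

    import time

    class TaskFailed(Exception):
        """Hypothetical stand-in for oslo_vmware's translated task fault."""
        pass

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a vSphere task until it reaches a terminal state."""
        while True:
            info = get_task_info()            # one poll round-trip per iteration
            if info['state'] == 'success':    # e.g. task-4281535: duration_secs 0.07055
                return info.get('result')
            if info['state'] == 'error':      # surfaces as VimFaultException in the log
                raise TaskFailed(info.get('error'))
            time.sleep(interval)              # between polls the log prints "progress is 0%"

    # Usage: a fake task that succeeds on the third poll.
    _states = iter([{'state': 'running'}, {'state': 'running'},
                    {'state': 'success', 'result': 'ok'}])
    print(wait_for_task(lambda: next(_states), interval=0.01))   # -> ok

In the real library the loop is driven by a green-thread looping call rather than time.sleep, which is why the error path above appears in the traceback via eventlet's hub.switch().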
[ 827.657724] env[61473]: DEBUG nova.compute.claims [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 827.657906] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.658128] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.668666] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 827.729594] env[61473]: DEBUG oslo_vmware.rw_handles [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 827.789801] env[61473]: DEBUG oslo_vmware.rw_handles [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 827.789801] env[61473]: DEBUG oslo_vmware.rw_handles [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 828.166702] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e8a33b-f77b-452d-bed4-3cbf57e51302 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.174612] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58052f7-c3c7-40ea-9f29-4520c2fc3f3c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.205125] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86423a19-b942-42c7-92ba-fe4cff2d50a7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.212559] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090cca30-a3c8-45dd-a03d-eed66f6b0f37 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.225827] env[61473]: DEBUG nova.compute.provider_tree [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.234875] env[61473]: DEBUG nova.scheduler.client.report [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 828.251292] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.593s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.251826] env[61473]: ERROR nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 828.251826] env[61473]: Faults: ['InvalidArgument'] [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Traceback (most recent call last): [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 828.251826] env[61473]: ERROR nova.compute.manager 
[instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] self.driver.spawn(context, instance, image_meta, [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] self._vmops.spawn(context, instance, image_meta, injected_files, [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] self._fetch_image_if_missing(context, vi) [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] image_cache(vi, tmp_image_ds_loc) [ 828.251826] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] vm_util.copy_virtual_disk( [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] session._wait_for_task(vmdk_copy_task) [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] return self.wait_for_task(task_ref) [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] return evt.wait() [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] result = hub.switch() [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] return self.greenlet.switch() [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 828.252171] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] self.f(*self.args, **self.kw) [ 828.252507] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 828.252507] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] raise exceptions.translate_fault(task_info.error) [ 828.252507] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 828.252507] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Faults: ['InvalidArgument'] [ 828.252507] env[61473]: ERROR nova.compute.manager [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] [ 828.252640] env[61473]: DEBUG nova.compute.utils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 828.255081] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Build of instance 5a1247fd-0053-444a-bb93-2ff419d9e102 was re-scheduled: A specified parameter was not correct: fileType [ 828.255081] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 828.255207] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 828.255316] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 828.255472] env[61473]: DEBUG nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 828.255631] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 828.809147] env[61473]: DEBUG nova.network.neutron [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.827828] env[61473]: INFO nova.compute.manager [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] Took 0.57 seconds to deallocate network for instance. [ 828.937522] env[61473]: INFO nova.scheduler.client.report [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Deleted allocations for instance 5a1247fd-0053-444a-bb93-2ff419d9e102 [ 828.966906] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f60fd49c-4561-4caf-ac82-d787c2f5d459 tempest-TenantUsagesTestJSON-1570716904 tempest-TenantUsagesTestJSON-1570716904-project-member] Lock "5a1247fd-0053-444a-bb93-2ff419d9e102" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.539s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.970154] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "5a1247fd-0053-444a-bb93-2ff419d9e102" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 146.775s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.970388] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5a1247fd-0053-444a-bb93-2ff419d9e102] During sync_power_state the instance has a pending task (spawning). Skip. [ 828.970601] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "5a1247fd-0053-444a-bb93-2ff419d9e102" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.991807] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 829.054739] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.055010] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.056491] env[61473]: INFO nova.compute.claims [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.571971] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f90319-98ac-4bc6-80e2-5a12bbea4025 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.579819] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd36109-ddc8-4c5d-8716-7dd8e5ae1632 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.610487] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1deb1b-3be9-4c70-8f41-43205e52e65d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.617723] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967f5e4c-e3be-46c0-8126-0db81d597bab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.630962] env[61473]: DEBUG nova.compute.provider_tree [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.644631] env[61473]: DEBUG nova.scheduler.client.report [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.658974] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.604s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.659448] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 829.696659] env[61473]: DEBUG nova.compute.utils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 829.698579] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 829.698579] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 829.711152] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 829.777166] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 829.791977] env[61473]: DEBUG nova.policy [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78d5dc51bdff4ac497db3fdaa3ac4339', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72045edaa7784faea6f453f344ee6815', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 829.813309] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.813632] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.813841] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.814113] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.814314] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.814511] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.814805] env[61473]: DEBUG 
nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.815164] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.815421] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.815667] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.815922] env[61473]: DEBUG nova.virt.hardware [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.817618] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121b22a9-02b6-41c9-b702-4f8b7b3bfc08 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.827686] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe043a3-d79a-46be-84cf-3330b41c7443 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.185975] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Successfully created port: 145b0535-6b47-4f15-8f00-1e78c38ce2e4 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.070601] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Successfully updated port: 145b0535-6b47-4f15-8f00-1e78c38ce2e4 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.082317] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "refresh_cache-e28da414-8fb8-4470-873a-a285925dd988" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.085020] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f 
tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquired lock "refresh_cache-e28da414-8fb8-4470-873a-a285925dd988" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.085020] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 831.154676] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.238208] env[61473]: DEBUG nova.compute.manager [req-cbc98c8b-9073-4bf9-8741-324df30cc05b req-5b376bf9-15ce-4eda-a415-39d2948e6b9a service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] Received event network-vif-plugged-145b0535-6b47-4f15-8f00-1e78c38ce2e4 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 831.238422] env[61473]: DEBUG oslo_concurrency.lockutils [req-cbc98c8b-9073-4bf9-8741-324df30cc05b req-5b376bf9-15ce-4eda-a415-39d2948e6b9a service nova] Acquiring lock "e28da414-8fb8-4470-873a-a285925dd988-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.238589] env[61473]: DEBUG oslo_concurrency.lockutils [req-cbc98c8b-9073-4bf9-8741-324df30cc05b req-5b376bf9-15ce-4eda-a415-39d2948e6b9a service nova] Lock "e28da414-8fb8-4470-873a-a285925dd988-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.238754] env[61473]: DEBUG oslo_concurrency.lockutils [req-cbc98c8b-9073-4bf9-8741-324df30cc05b req-5b376bf9-15ce-4eda-a415-39d2948e6b9a service nova] Lock "e28da414-8fb8-4470-873a-a285925dd988-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.238916] env[61473]: DEBUG nova.compute.manager [req-cbc98c8b-9073-4bf9-8741-324df30cc05b req-5b376bf9-15ce-4eda-a415-39d2948e6b9a service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] No waiting events found dispatching network-vif-plugged-145b0535-6b47-4f15-8f00-1e78c38ce2e4 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 831.239439] env[61473]: WARNING nova.compute.manager [req-cbc98c8b-9073-4bf9-8741-324df30cc05b req-5b376bf9-15ce-4eda-a415-39d2948e6b9a service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] Received unexpected event network-vif-plugged-145b0535-6b47-4f15-8f00-1e78c38ce2e4 for instance with vm_state building and task_state spawning. 
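[editor's note] The req-cbc98c8b records above show Neutron's network-vif-plugged external event arriving while the instance is still spawning: pop_instance_event finds no registered waiter, so the manager logs "No waiting events found dispatching" followed by the "unexpected event" warning, and the build continues regardless. The handshake is essentially a keyed event latch; the sketch below is an illustrative model under that assumption (InstanceEventLatch is an invented name, not Nova's InstanceEvents implementation).

    import threading

    class InstanceEventLatch:
        """Keyed waiters: (instance_uuid, event_name) -> threading.Event."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}

        def prepare(self, instance_uuid, event_name):
            # Registered by the spawn path before triggering the action
            # (e.g. before asking Neutron to bind the port).
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop(self, instance_uuid, event_name):
            # Called when the external event arrives; None means nobody was waiting.
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    latch = InstanceEventLatch()
    # The event arrives before spawn registered a waiter -> the "unexpected event" path:
    if latch.pop('e28da414-8fb8-4470-873a-a285925dd988',
                 'network-vif-plugged-145b0535-6b47-4f15-8f00-1e78c38ce2e4') is None:
        print('WARNING: received unexpected event')   # mirrors the log's warning

Treating an early event as a warning rather than an error is deliberate: with vm_state building and task_state spawning, the port can legitimately come up before the waiter exists, as happens here.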
[ 831.418523] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Updating instance_info_cache with network_info: [{"id": "145b0535-6b47-4f15-8f00-1e78c38ce2e4", "address": "fa:16:3e:3b:62:37", "network": {"id": "96fc7821-8b52-4887-8c09-59c107f1fc5c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-254752078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72045edaa7784faea6f453f344ee6815", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap145b0535-6b", "ovs_interfaceid": "145b0535-6b47-4f15-8f00-1e78c38ce2e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.435537] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Releasing lock "refresh_cache-e28da414-8fb8-4470-873a-a285925dd988" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.435665] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Instance network_info: |[{"id": "145b0535-6b47-4f15-8f00-1e78c38ce2e4", "address": "fa:16:3e:3b:62:37", "network": {"id": "96fc7821-8b52-4887-8c09-59c107f1fc5c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-254752078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72045edaa7784faea6f453f344ee6815", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap145b0535-6b", "ovs_interfaceid": "145b0535-6b47-4f15-8f00-1e78c38ce2e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 831.436076] env[61473]: DEBUG 
nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:62:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '145b0535-6b47-4f15-8f00-1e78c38ce2e4', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.446858] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Creating folder: Project (72045edaa7784faea6f453f344ee6815). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 831.447578] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-533bc367-853e-4f56-9581-0a5de1d95bd0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.521063] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Created folder: Project (72045edaa7784faea6f453f344ee6815) in parent group-v843485. [ 831.521287] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Creating folder: Instances. Parent ref: group-v843520. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 831.521534] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7df5feca-f026-4c34-9e96-f5d2fc78abc7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.531249] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Created folder: Instances in parent group-v843520. [ 831.531516] env[61473]: DEBUG oslo.service.loopingcall [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.531673] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e28da414-8fb8-4470-873a-a285925dd988] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 831.531864] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ee6128e-2b1a-4c70-acd4-a199fbf964ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.550865] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.550865] env[61473]: value = "task-4281538" [ 831.550865] env[61473]: _type = "Task" [ 831.550865] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.558984] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281538, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.061640] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281538, 'name': CreateVM_Task, 'duration_secs': 0.317469} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.061790] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e28da414-8fb8-4470-873a-a285925dd988] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 832.062482] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.062671] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.062991] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.063257] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b6ec99d-e22e-4007-8cd3-43c3e4a347d1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.067739] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Waiting for the task: (returnval){ [ 832.067739] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52edfd5d-2671-5b69-920e-23d82956b2ac" [ 832.067739] env[61473]: _type = "Task" [ 832.067739] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.076754] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52edfd5d-2671-5b69-920e-23d82956b2ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.580822] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.580822] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.580822] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.562575] env[61473]: DEBUG nova.compute.manager [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] Received event network-changed-145b0535-6b47-4f15-8f00-1e78c38ce2e4 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 833.562852] env[61473]: DEBUG nova.compute.manager [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] Refreshing instance network info cache due to event network-changed-145b0535-6b47-4f15-8f00-1e78c38ce2e4. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 833.562997] env[61473]: DEBUG oslo_concurrency.lockutils [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] Acquiring lock "refresh_cache-e28da414-8fb8-4470-873a-a285925dd988" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.563132] env[61473]: DEBUG oslo_concurrency.lockutils [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] Acquired lock "refresh_cache-e28da414-8fb8-4470-873a-a285925dd988" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.563295] env[61473]: DEBUG nova.network.neutron [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] Refreshing network info cache for port 145b0535-6b47-4f15-8f00-1e78c38ce2e4 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 834.119119] env[61473]: DEBUG nova.network.neutron [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] Updated VIF entry in instance network info cache for port 145b0535-6b47-4f15-8f00-1e78c38ce2e4. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 834.119485] env[61473]: DEBUG nova.network.neutron [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] [instance: e28da414-8fb8-4470-873a-a285925dd988] Updating instance_info_cache with network_info: [{"id": "145b0535-6b47-4f15-8f00-1e78c38ce2e4", "address": "fa:16:3e:3b:62:37", "network": {"id": "96fc7821-8b52-4887-8c09-59c107f1fc5c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-254752078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72045edaa7784faea6f453f344ee6815", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap145b0535-6b", "ovs_interfaceid": "145b0535-6b47-4f15-8f00-1e78c38ce2e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.129969] env[61473]: DEBUG oslo_concurrency.lockutils [req-8777d9d0-7e2e-44b2-8fa0-da0adf3e3a57 req-51138057-45bc-4e93-bab8-7a98ad270864 service nova] Releasing lock "refresh_cache-e28da414-8fb8-4470-873a-a285925dd988" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.808378] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.808670] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.961683] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.966308] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.966110] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.966351] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.966567] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.966668] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.966815] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 863.966962] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.980588] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.980812] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.980979] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.981156] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 863.982715] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eb75e6-436b-4525-9717-27c110c44593 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.991544] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98e85d3-9a58-487a-94c8-de15063e8e3f {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.005976] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277e653e-7bf1-4f40-b1e7-c6c1208b461a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.012568] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc19869-a76f-4313-96b9-671c9dd2ef5e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.041710] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180601MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 864.041871] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.042084] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.117938] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 28261a41-7f6d-495c-abbd-7f73f67e80d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118113] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118243] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5d67907c-7199-4734-a5cc-4466703eaa51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118362] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dc8b5106-5657-409b-b425-b929c8e893d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118479] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118617] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118754] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118870] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.118980] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.119106] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 864.130491] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.141317] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0a233986-dc15-431b-bf1b-58bbb14c9965 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.151531] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.162382] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 28b6c493-6d01-475d-818a-93540528a3f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.172403] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f1938744-dd32-4992-9cf9-53d81491e4a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.183704] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e6071ce1-467f-4082-b885-adb4555634d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.193181] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 23c4d824-ec68-42ad-b50d-ee33d8c833a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.204496] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f5079713-8e14-41b8-84db-2b599f6e136e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.215852] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 72a6972e-a183-4629-ba23-08135882ea29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.226398] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.236977] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 762e77db-04ae-474e-8f6e-e6b8c81ecf47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.247589] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9e023463-2573-4518-b6a4-cb1bd3bc0224 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.259108] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e2c7e712-922a-4fc9-882d-03c425fbdf4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.269990] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.279852] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8f80e386-439c-456e-a4ad-d643de6ae1b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.290397] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5a356b28-fa9a-4fe6-ab01-e5576d802e8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.300404] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19550523-59ec-4891-8695-9939b1baefbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.311274] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0091006b-084b-40e7-8d60-a2b43acc08a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.321097] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8ac20624-5031-495d-bdf0-a5a7938539a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.331256] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 50977f71-35b5-46e5-8096-5725c8053295 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.341011] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.350422] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d2946031-980f-4ee9-8818-f4d7584d4e3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.360794] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b7aecc9b-4032-4e15-963e-6cc270af55f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.371674] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 77271f0d-6c43-4ecc-9211-e16c977b8531 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.381771] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f081b1bc-1d1f-4b5c-8690-e0186c1a7793 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.392467] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dd63d5b4-50a7-4dce-8e2f-3eac7d55e424 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.403060] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.403184] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 864.403317] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 864.815188] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedf2ac0-60ed-4cc1-9970-94a49439830d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.823994] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b8f7c7-e291-41c7-985d-61c06632df09 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.856554] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9731ed7a-0258-4f7f-aeee-876a867803dc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.861468] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c31df0-942c-400e-802c-48341617cadb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.874325] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.882700] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.895737] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 864.895922] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.854s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.891936] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.891936] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.891936] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 865.891936] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 865.913318] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913318] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913318] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913318] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913318] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913561] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913561] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913561] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913561] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913561] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 865.913712] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 865.913712] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.805544] env[61473]: DEBUG oslo_concurrency.lockutils [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.375322] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.145102] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "5d67907c-7199-4734-a5cc-4466703eaa51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.535766] env[61473]: WARNING oslo_vmware.rw_handles [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 876.535766] env[61473]: ERROR oslo_vmware.rw_handles [ 876.536419] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 876.538128] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 876.538386] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Copying Virtual Disk [datastore2] vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/6453844d-a0dc-4240-b4ac-9535fef25ad7/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 876.538681] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0164816-0f76-4e9b-91ff-8d2075841ff7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.547153] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Waiting for the task: (returnval){ [ 876.547153] env[61473]: value = "task-4281539" [ 876.547153] env[61473]: _type = "Task" [ 876.547153] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.555108] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Task: {'id': task-4281539, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.058622] env[61473]: DEBUG oslo_vmware.exceptions [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 877.059052] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.059886] env[61473]: ERROR nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 877.059886] env[61473]: Faults: ['InvalidArgument'] [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Traceback (most recent call last): [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] yield resources [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self.driver.spawn(context, instance, image_meta, [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self._fetch_image_if_missing(context, vi) [ 877.059886] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] image_cache(vi, tmp_image_ds_loc) [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] vm_util.copy_virtual_disk( [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] session._wait_for_task(vmdk_copy_task) [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] return self.wait_for_task(task_ref) [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] return evt.wait() [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] result = hub.switch() [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 877.060252] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] return self.greenlet.switch() [ 877.060666] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 877.060666] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self.f(*self.args, **self.kw) [ 877.060666] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 877.060666] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] raise exceptions.translate_fault(task_info.error) [ 877.060666] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 877.060666] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Faults: ['InvalidArgument'] [ 877.060666] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] [ 877.060666] env[61473]: INFO nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Terminating instance [ 877.062874] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.062874] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.063695] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 
tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 877.063899] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 877.064176] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8db2404-a418-44b2-8df1-0cccee573021 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.066392] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ceb2e05-eeb7-4ee1-a6d8-7ea586943b19 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.073523] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 877.073523] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2972736f-6268-449c-986f-2e3f3072130c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.075629] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.075802] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 877.076755] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56dc6c8f-e47c-43ed-bc6d-8e946a44242f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.081507] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Waiting for the task: (returnval){ [ 877.081507] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]525ab616-96ad-0184-4ec4-fd51ada97874" [ 877.081507] env[61473]: _type = "Task" [ 877.081507] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.088551] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]525ab616-96ad-0184-4ec4-fd51ada97874, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.168754] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 877.168996] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 877.169236] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Deleting the datastore file [datastore2] 28261a41-7f6d-495c-abbd-7f73f67e80d6 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 877.169507] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32c081c5-0cc4-4c9b-ad43-f77bf2a1fbca {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.176338] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Waiting for the task: (returnval){ [ 877.176338] env[61473]: value = "task-4281541" [ 877.176338] env[61473]: _type = "Task" [ 877.176338] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.183886] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Task: {'id': task-4281541, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.593433] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 877.594235] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Creating directory with path [datastore2] vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.594616] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-577cdded-9b2d-4ecf-afde-31a11cc72498 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.608644] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Created directory with path [datastore2] vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.609016] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Fetch image to [datastore2] vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 877.609444] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 877.614061] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51326536-665d-4a2f-92d7-be86ab2c0896 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.621644] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86191b33-efa0-411a-883a-09d629daac45 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.633629] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031ea87a-cd18-4bc5-b307-7021aedd80e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.673556] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cf61c3f2-f9c7-49ef-8028-31f95d0ff973 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.682407] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-781a0c46-3bb2-4f3d-8b94-3b1af60765a4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.687575] env[61473]: DEBUG oslo_vmware.api [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Task: {'id': task-4281541, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087853} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.692023] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.692023] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 877.692023] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 877.692023] env[61473]: INFO nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Took 0.63 seconds to destroy the instance on the hypervisor. 
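The spawn failure above bottoms out in CopyVirtualDisk_Task: vCenter rejects the copy with InvalidArgument on fileType, and oslo.vmware's task polling is what converts that task error into the VimFaultException in the traceback. A minimal sketch of that invoke/poll/translate path, with a hypothetical endpoint and credentials and illustrative arguments:

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    # Hypothetical session; host/user/password are placeholders, the last two
    # positional arguments are api_retry_count and task_poll_interval.
    session = api.VMwareAPISession('vc1.example.test', 'user', 'secret', 10, 0.5)

    def copy_disk(source, dest, dc_ref):
        # The same SOAP call logged above as VirtualDiskManager.CopyVirtualDisk_Task.
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=source, sourceDatacenter=dc_ref,
                                  destName=dest, force=False)
        try:
            # wait_for_task polls task_info (the "progress is 0%" lines) and
            # raises once the task state flips to 'error'.
            session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # e.fault_list is what the traceback prints as Faults: ['InvalidArgument'].
            if 'InvalidArgument' in e.fault_list:
                pass  # the fileType fault in this log lands here
            raise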
[ 877.692023] env[61473]: DEBUG nova.compute.claims [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 877.692791] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.692791] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.716896] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 877.801959] env[61473]: DEBUG oslo_vmware.rw_handles [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 877.861674] env[61473]: DEBUG oslo_vmware.rw_handles [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 877.861878] env[61473]: DEBUG oslo_vmware.rw_handles [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 878.246031] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf3a023-4a14-4cf2-b890-ed1151ad5586 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.252929] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa73692-75e7-429a-9f75-281ea5b6d0cc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.283500] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc892791-41d4-4895-802d-0fb544008069 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.290999] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81d471c-7eb5-4745-a834-b69cc364b0fa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.304522] env[61473]: DEBUG nova.compute.provider_tree [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.313767] env[61473]: DEBUG nova.scheduler.client.report [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 878.330238] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.639s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.330803] env[61473]: ERROR nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 878.330803] env[61473]: Faults: ['InvalidArgument'] [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Traceback (most recent call last): [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 
878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self.driver.spawn(context, instance, image_meta, [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self._fetch_image_if_missing(context, vi) [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] image_cache(vi, tmp_image_ds_loc) [ 878.330803] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] vm_util.copy_virtual_disk( [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] session._wait_for_task(vmdk_copy_task) [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] return self.wait_for_task(task_ref) [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] return evt.wait() [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] result = hub.switch() [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] return self.greenlet.switch() [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 878.331183] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] self.f(*self.args, **self.kw) [ 878.331535] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 878.331535] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] raise exceptions.translate_fault(task_info.error) [ 878.331535] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 878.331535] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Faults: ['InvalidArgument'] [ 878.331535] env[61473]: ERROR nova.compute.manager [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] [ 878.331675] env[61473]: DEBUG nova.compute.utils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 878.332994] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Build of instance 28261a41-7f6d-495c-abbd-7f73f67e80d6 was re-scheduled: A specified parameter was not correct: fileType [ 878.332994] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 878.333435] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 878.333614] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 878.333770] env[61473]: DEBUG nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 878.333931] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 878.697523] env[61473]: DEBUG nova.network.neutron [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.709645] env[61473]: INFO nova.compute.manager [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Took 0.38 seconds to deallocate network for instance. [ 878.808559] env[61473]: INFO nova.scheduler.client.report [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Deleted allocations for instance 28261a41-7f6d-495c-abbd-7f73f67e80d6 [ 878.831429] env[61473]: DEBUG oslo_concurrency.lockutils [None req-cfbf72e0-6e28-446c-99d3-5d29fc1a9dfb tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.183s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.832615] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 196.637s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.832807] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] During sync_power_state the instance has a pending task (spawning). Skip. 
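For scale, the inventory dict in the scheduler report records above pins down what can actually be placed on provider 89e0ead3-8356-4b9c-95ce-a1a119b67576: Placement's effective capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Checking the logged numbers (min_unit and step_size, both 1 here, omitted):

    # Inventory values copied from the log records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 180},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable, at most {inv['max_unit']} per allocation")

    # VCPU: 192 schedulable, at most 16 per allocation
    # MEMORY_MB: 196078 schedulable, at most 65530 per allocation
    # DISK_GB: 329 schedulable, at most 180 per allocation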
[ 878.832981] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.833641] env[61473]: DEBUG oslo_concurrency.lockutils [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 8.028s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.833862] env[61473]: DEBUG oslo_concurrency.lockutils [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Acquiring lock "28261a41-7f6d-495c-abbd-7f73f67e80d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.834077] env[61473]: DEBUG oslo_concurrency.lockutils [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.834247] env[61473]: DEBUG oslo_concurrency.lockutils [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.836512] env[61473]: INFO nova.compute.manager [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Terminating instance [ 878.840766] env[61473]: DEBUG nova.compute.manager [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 878.840944] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 878.841479] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15428847-bdcb-4d57-a34a-fb16c1cef764 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.846847] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 878.853296] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d8f649-8f1b-454d-aa4f-65ea1e4068d4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.881990] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 28261a41-7f6d-495c-abbd-7f73f67e80d6 could not be found. [ 878.882232] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 878.882411] env[61473]: INFO nova.compute.manager [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 878.882671] env[61473]: DEBUG oslo.service.loopingcall [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.883658] env[61473]: DEBUG nova.compute.manager [-] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 878.883776] env[61473]: DEBUG nova.network.neutron [-] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 878.900076] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.900315] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.901723] env[61473]: INFO nova.compute.claims [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.914024] env[61473]: DEBUG nova.network.neutron [-] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.929166] env[61473]: INFO nova.compute.manager [-] [instance: 28261a41-7f6d-495c-abbd-7f73f67e80d6] Took 0.05 seconds to deallocate network for instance. 
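The "Waiting for function ... _deallocate_network_with_retries to return." record above comes from oslo.service's RetryDecorator, which parks the caller on a looping call (loopingcall.py:435) until the wrapped deallocation succeeds or runs out of retries. A sketch of that wrapper, with illustrative retry parameters rather than Nova's exact values:

    from oslo_service import loopingcall

    class DeallocationRetry(Exception):
        """Hypothetical stand-in for the exception types Nova retries on."""

    @loopingcall.RetryDecorator(max_retry_count=3,   # illustrative values
                                inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(DeallocationRetry,))
    def _deallocate_network_with_retries():
        # The real body calls the compute manager's network deallocation;
        # raising a listed exception reschedules this function on the looping
        # call, while any other exception propagates to the caller immediately.
        ...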
[ 879.055190] env[61473]: DEBUG oslo_concurrency.lockutils [None req-347b00f0-3df2-4001-8c6c-83cac2bdbab2 tempest-ServersAdminNegativeTestJSON-628603657 tempest-ServersAdminNegativeTestJSON-628603657-project-member] Lock "28261a41-7f6d-495c-abbd-7f73f67e80d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.355105] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cb4fc4-f922-4f10-ba48-9e94d2ede4ab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.362868] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3823e11-953c-4a71-850c-a55ca5bed034 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.393053] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2291d5bc-46d6-46bd-9d3d-6280c9d2d9d4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.400388] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0470e191-dd4f-43f9-ac0c-006124ef328a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.413610] env[61473]: DEBUG nova.compute.provider_tree [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.424595] env[61473]: DEBUG nova.scheduler.client.report [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 879.439533] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.539s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.440031] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 879.470178] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "dc8b5106-5657-409b-b425-b929c8e893d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.475050] env[61473]: DEBUG nova.compute.utils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 879.476258] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 879.476428] env[61473]: DEBUG nova.network.neutron [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 879.488929] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 879.562251] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 879.590677] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 879.591043] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 879.591127] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.591355] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 879.591531] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.591787] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 879.592068] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 879.592254] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 879.592467] env[61473]: DEBUG nova.virt.hardware [None 
req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 879.592672] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 879.592864] env[61473]: DEBUG nova.virt.hardware [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 879.594032] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06427315-5740-40a9-a67f-5a2215e4d116 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.603099] env[61473]: DEBUG nova.policy [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79c0cb8540ce460396182931069f1b02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57f73c994ac74204a438ab789205ba29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 879.605655] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b08d81d-e705-49b9-abfc-e15c897d5e5a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.356388] env[61473]: DEBUG nova.network.neutron [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Successfully created port: c207d499-bca8-44de-937e-c3fd73f4aa9b {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.244073] env[61473]: DEBUG nova.network.neutron [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Successfully updated port: c207d499-bca8-44de-937e-c3fd73f4aa9b {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.256360] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "refresh_cache-57eb1cd4-7c95-4173-800b-385bed2dbbbe" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.256464] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 
tempest-ServerDiagnosticsTest-791504828-project-member] Acquired lock "refresh_cache-57eb1cd4-7c95-4173-800b-385bed2dbbbe" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.256612] env[61473]: DEBUG nova.network.neutron [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 881.315241] env[61473]: DEBUG nova.network.neutron [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 881.342084] env[61473]: DEBUG nova.compute.manager [req-21ac7975-706e-480e-b7a9-26ff6f8a9fd0 req-ce045285-0c6c-4a51-8871-1ec1da2259c3 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Received event network-vif-plugged-c207d499-bca8-44de-937e-c3fd73f4aa9b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 881.342310] env[61473]: DEBUG oslo_concurrency.lockutils [req-21ac7975-706e-480e-b7a9-26ff6f8a9fd0 req-ce045285-0c6c-4a51-8871-1ec1da2259c3 service nova] Acquiring lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.342518] env[61473]: DEBUG oslo_concurrency.lockutils [req-21ac7975-706e-480e-b7a9-26ff6f8a9fd0 req-ce045285-0c6c-4a51-8871-1ec1da2259c3 service nova] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.342683] env[61473]: DEBUG oslo_concurrency.lockutils [req-21ac7975-706e-480e-b7a9-26ff6f8a9fd0 req-ce045285-0c6c-4a51-8871-1ec1da2259c3 service nova] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.342848] env[61473]: DEBUG nova.compute.manager [req-21ac7975-706e-480e-b7a9-26ff6f8a9fd0 req-ce045285-0c6c-4a51-8871-1ec1da2259c3 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] No waiting events found dispatching network-vif-plugged-c207d499-bca8-44de-937e-c3fd73f4aa9b {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 881.343031] env[61473]: WARNING nova.compute.manager [req-21ac7975-706e-480e-b7a9-26ff6f8a9fd0 req-ce045285-0c6c-4a51-8871-1ec1da2259c3 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Received unexpected event network-vif-plugged-c207d499-bca8-44de-937e-c3fd73f4aa9b for instance with vm_state building and task_state spawning. 
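The network-vif-plugged sequence above is the external-event plumbing working in the unsynchronized direction: Neutron reports the port as plugged, Nova looks for a waiter keyed by "network-vif-plugged-<port_id>", finds none because the instance is still building, and logs the WARNING. When a caller does want to block on that event, the pattern looks roughly like this sketch, where virtapi, instance, and plug_vifs stand in for objects the compute manager supplies:

    def plug_and_wait(virtapi, instance, plug_vifs,
                      port_id='c207d499-bca8-44de-937e-c3fd73f4aa9b'):
        # The tag is the Neutron port UUID; name + tag form the full event
        # name that pop_instance_event matches against.
        events = [('network-vif-plugged', port_id)]
        # Register the waiter before plugging; the WARNING above is the
        # opposite ordering, an event arriving with no waiter registered.
        with virtapi.wait_for_instance_event(instance, events, deadline=300):
            plug_vifs(instance)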
[ 881.571506] env[61473]: DEBUG nova.network.neutron [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Updating instance_info_cache with network_info: [{"id": "c207d499-bca8-44de-937e-c3fd73f4aa9b", "address": "fa:16:3e:d0:30:51", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc207d499-bc", "ovs_interfaceid": "c207d499-bca8-44de-937e-c3fd73f4aa9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.589339] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Releasing lock "refresh_cache-57eb1cd4-7c95-4173-800b-385bed2dbbbe" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.589339] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Instance network_info: |[{"id": "c207d499-bca8-44de-937e-c3fd73f4aa9b", "address": "fa:16:3e:d0:30:51", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc207d499-bc", "ovs_interfaceid": "c207d499-bca8-44de-937e-c3fd73f4aa9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 881.590562] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 
tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:30:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c207d499-bca8-44de-937e-c3fd73f4aa9b', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.596446] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Creating folder: Project (57f73c994ac74204a438ab789205ba29). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 881.597124] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2c0bafc-ed66-49f6-ae0a-99d7536c5fad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.614181] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Created folder: Project (57f73c994ac74204a438ab789205ba29) in parent group-v843485. [ 881.614181] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Creating folder: Instances. Parent ref: group-v843523. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 881.614181] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf9f2f1c-3496-438a-af50-bd0df0eac38d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.616257] env[61473]: DEBUG oslo_concurrency.lockutils [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.627018] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Created folder: Instances in parent group-v843523. [ 881.627018] env[61473]: DEBUG oslo.service.loopingcall [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return.
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.627018] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 881.627018] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e72fc7fe-cf00-4e8a-900a-78556d74887e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.645500] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.645500] env[61473]: value = "task-4281544" [ 881.645500] env[61473]: _type = "Task" [ 881.645500] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.653325] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281544, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.156692] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281544, 'name': CreateVM_Task, 'duration_secs': 0.282637} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.156864] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 882.157543] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.157707] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.158036] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 882.158350] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20fdbd8c-454a-4845-8934-d0e74a97d585 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.165132] env[61473]: DEBUG oslo_vmware.api [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Waiting for the task: (returnval){ [ 882.165132] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]522a3a59-bee1-21e2-f40a-d0aaf346b6f7" [ 882.165132] env[61473]: _type = "Task" [ 882.165132] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.178061] env[61473]: DEBUG oslo_vmware.api [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]522a3a59-bee1-21e2-f40a-d0aaf346b6f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.675247] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.675510] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.675713] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.587462] env[61473]: DEBUG nova.compute.manager [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Received event network-changed-c207d499-bca8-44de-937e-c3fd73f4aa9b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 883.587549] env[61473]: DEBUG nova.compute.manager [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Refreshing instance network info cache due to event network-changed-c207d499-bca8-44de-937e-c3fd73f4aa9b. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 883.587739] env[61473]: DEBUG oslo_concurrency.lockutils [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] Acquiring lock "refresh_cache-57eb1cd4-7c95-4173-800b-385bed2dbbbe" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.587878] env[61473]: DEBUG oslo_concurrency.lockutils [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] Acquired lock "refresh_cache-57eb1cd4-7c95-4173-800b-385bed2dbbbe" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.588051] env[61473]: DEBUG nova.network.neutron [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Refreshing network info cache for port c207d499-bca8-44de-937e-c3fd73f4aa9b {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.147194] env[61473]: DEBUG nova.network.neutron [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Updated VIF entry in instance network info cache for port c207d499-bca8-44de-937e-c3fd73f4aa9b. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 884.147194] env[61473]: DEBUG nova.network.neutron [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Updating instance_info_cache with network_info: [{"id": "c207d499-bca8-44de-937e-c3fd73f4aa9b", "address": "fa:16:3e:d0:30:51", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc207d499-bc", "ovs_interfaceid": "c207d499-bca8-44de-937e-c3fd73f4aa9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.162775] env[61473]: DEBUG oslo_concurrency.lockutils [req-510621c3-851f-4099-9be9-4af67bc0780f req-bbcfa396-4858-4c15-a9a2-087f82181f82 service nova] Releasing lock "refresh_cache-57eb1cd4-7c95-4173-800b-385bed2dbbbe" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.248971] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock 
"f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.764821] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "c6880758-25cf-4078-9455-827db6fb6435" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.002683] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "9031b0d9-4e07-4afa-a597-770b80df2511" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.002944] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "9031b0d9-4e07-4afa-a597-770b80df2511" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.008146] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "e28da414-8fb8-4470-873a-a285925dd988" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.475550] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "d9395a72-994b-4baf-a296-2fc3d05a239c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.565763] env[61473]: DEBUG oslo_concurrency.lockutils [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.576242] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.576584] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 
tempest-ServerShowV247Test-948988317-project-member] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.156960] env[61473]: DEBUG oslo_concurrency.lockutils [None req-479a6f6f-3fd5-4cfd-ad35-8e2bc5fc1709 tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "95e4c8b2-41c9-4882-a5bf-0b4a7b14e726" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.157834] env[61473]: DEBUG oslo_concurrency.lockutils [None req-479a6f6f-3fd5-4cfd-ad35-8e2bc5fc1709 tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "95e4c8b2-41c9-4882-a5bf-0b4a7b14e726" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.742127] env[61473]: DEBUG oslo_concurrency.lockutils [None req-82bcfb07-86a2-4064-987a-d284ffe93dd5 tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] Acquiring lock "80de92ab-332a-4e1f-8cd0-61cbaa791e06" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.742527] env[61473]: DEBUG oslo_concurrency.lockutils [None req-82bcfb07-86a2-4064-987a-d284ffe93dd5 tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] Lock "80de92ab-332a-4e1f-8cd0-61cbaa791e06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.775824] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1095ce42-2e43-4064-85a1-0a9b9f2dbbb5 tempest-ServersAaction247Test-827318745 tempest-ServersAaction247Test-827318745-project-member] Acquiring lock "7bce47a9-edeb-4ecb-b946-c29b2f360ac4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.776171] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1095ce42-2e43-4064-85a1-0a9b9f2dbbb5 tempest-ServersAaction247Test-827318745 tempest-ServersAaction247Test-827318745-project-member] Lock "7bce47a9-edeb-4ecb-b946-c29b2f360ac4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.971418] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7a313f71-36c0-41db-b339-79371e280ecf tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] Acquiring lock "c5efb3fe-7432-4daf-9fff-518781b8f435" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.972391] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7a313f71-36c0-41db-b339-79371e280ecf tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] Lock "c5efb3fe-7432-4daf-9fff-518781b8f435" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.968683] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.968916] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11255}} [ 920.984301] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] There are 0 instances to clean {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11264}} [ 920.984603] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.984658] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances with incomplete migration {{(pid=61473) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11293}} [ 920.998450] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.590866] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9370e05d-b156-414b-b0ce-1be355a39b10 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Acquiring lock "fc93905f-f07a-4735-9297-2dbc1e2b0066" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.591376] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9370e05d-b156-414b-b0ce-1be355a39b10 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "fc93905f-f07a-4735-9297-2dbc1e2b0066" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.004308] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.004620] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None]
Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.966714] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.967128] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.968718] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.968976] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 924.969539] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.983987] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.984578] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.984881] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.985542] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 924.986938] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ea9a29-15c3-45d0-a8ee-bdb0cc0d8768 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.001023] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef847a0e-64f7-4358-b617-14c1509bea88 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.021842] env[61473]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c5a928-1c0e-44b7-9645-79e799b1e0f6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.031934] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d992adb7-57ba-406b-b9bf-a03563fc32b4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.068620] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180609MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 925.068801] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.069052] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.232127] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.232305] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5d67907c-7199-4734-a5cc-4466703eaa51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.232437] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dc8b5106-5657-409b-b425-b929c8e893d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.232625] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.232774] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.232899] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.233031] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.233152] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.233267] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.233382] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 925.246447] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.258254] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8f80e386-439c-456e-a4ad-d643de6ae1b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.269436] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5a356b28-fa9a-4fe6-ab01-e5576d802e8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.286913] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19550523-59ec-4891-8695-9939b1baefbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.298770] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0091006b-084b-40e7-8d60-a2b43acc08a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.310802] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8ac20624-5031-495d-bdf0-a5a7938539a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.323576] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 50977f71-35b5-46e5-8096-5725c8053295 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.335066] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.348444] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d2946031-980f-4ee9-8818-f4d7584d4e3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.359819] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b7aecc9b-4032-4e15-963e-6cc270af55f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.372024] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 77271f0d-6c43-4ecc-9211-e16c977b8531 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.384675] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f081b1bc-1d1f-4b5c-8690-e0186c1a7793 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.396195] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance dd63d5b4-50a7-4dce-8e2f-3eac7d55e424 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.407452] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.418799] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.430685] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.449912] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 95e4c8b2-41c9-4882-a5bf-0b4a7b14e726 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.463759] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 80de92ab-332a-4e1f-8cd0-61cbaa791e06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.483585] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7bce47a9-edeb-4ecb-b946-c29b2f360ac4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.495971] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c5efb3fe-7432-4daf-9fff-518781b8f435 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.506514] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance fc93905f-f07a-4735-9297-2dbc1e2b0066 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 925.506956] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 925.507316] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 925.530054] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing inventories for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 925.556203] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating ProviderTree inventory for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 925.556288] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 925.568482] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing aggregate associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, aggregates: None {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 925.599825] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing trait associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 925.936412] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a6565c74-f523-447f-8828-aa5b82650a55 tempest-ServerRescueTestJSON-1102761856 tempest-ServerRescueTestJSON-1102761856-project-member] Acquiring lock "9db4fe27-6702-41ec-b2c3-813918bbdb56" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.936412] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a6565c74-f523-447f-8828-aa5b82650a55 tempest-ServerRescueTestJSON-1102761856 tempest-ServerRescueTestJSON-1102761856-project-member] Lock "9db4fe27-6702-41ec-b2c3-813918bbdb56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.140234] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eb20eb-4dfe-4e44-bdfc-443c6f0218bc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.149253] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121a833c-23a8-436f-b3e9-61c605d6f89e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.181753] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccdbd5f-81d5-41f1-bbe6-b2a669fd2dd4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.190495] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ead2b3d-c6b9-437d-a104-7dfe13ba12de {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.205521] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.214270] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.231564] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 926.231767] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.163s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.339999] env[61473]: WARNING oslo_vmware.rw_handles [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected:
Remote end closed connection without response [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 926.339999] env[61473]: ERROR oslo_vmware.rw_handles [ 926.340535] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 926.343082] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 926.343082] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Copying Virtual Disk [datastore2] vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/ce96f6e7-98ce-4e48-b017-5cf8f85a6843/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 926.343082] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4e2122a-db9c-4bb3-9a96-78904385e491 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.349587] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0dadf6f4-5e3f-48b7-b284-16424aebaed1 tempest-ServerGroupTestJSON-969771004 tempest-ServerGroupTestJSON-969771004-project-member] Acquiring lock "998bff91-85c3-4f70-8056-2e77a0d80f07" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.349947] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0dadf6f4-5e3f-48b7-b284-16424aebaed1
tempest-ServerGroupTestJSON-969771004 tempest-ServerGroupTestJSON-969771004-project-member] Lock "998bff91-85c3-4f70-8056-2e77a0d80f07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.356786] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Waiting for the task: (returnval){ [ 926.356786] env[61473]: value = "task-4281554" [ 926.356786] env[61473]: _type = "Task" [ 926.356786] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.365842] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Task: {'id': task-4281554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.873130] env[61473]: DEBUG oslo_vmware.exceptions [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 926.873509] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.874136] env[61473]: ERROR nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 926.874136] env[61473]: Faults: ['InvalidArgument'] [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Traceback (most recent call last): [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] yield resources [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self.driver.spawn(context, instance, image_meta, [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self._vmops.spawn(context, instance, image_meta, injected_files, [
926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self._fetch_image_if_missing(context, vi) [ 926.874136] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] image_cache(vi, tmp_image_ds_loc) [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] vm_util.copy_virtual_disk( [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] session._wait_for_task(vmdk_copy_task) [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] return self.wait_for_task(task_ref) [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] return evt.wait() [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] result = hub.switch() [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 926.874528] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] return self.greenlet.switch() [ 926.874957] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 926.874957] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self.f(*self.args, **self.kw) [ 926.874957] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 926.874957] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] raise exceptions.translate_fault(task_info.error) [ 926.874957] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 926.874957] env[61473]: ERROR nova.compute.manager [instance: 
dc8b5106-5657-409b-b425-b929c8e893d5] Faults: ['InvalidArgument'] [ 926.874957] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] [ 926.874957] env[61473]: INFO nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Terminating instance [ 926.876467] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.876467] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.876984] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.877080] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquired lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.877312] env[61473]: DEBUG nova.network.neutron [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 926.878395] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acabc77d-e80e-4ff1-8a8a-04d4c54b26c2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.889797] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.890048] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 926.891265] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-663c63b3-3085-46a0-9efd-57315d151d24 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.898452] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 926.898452] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]527d2118-c926-a387-b887-bfd661ace08b" [ 926.898452] env[61473]: _type = "Task" [ 926.898452] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.909117] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]527d2118-c926-a387-b887-bfd661ace08b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.912357] env[61473]: DEBUG nova.network.neutron [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 927.044812] env[61473]: DEBUG nova.network.neutron [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.055065] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Releasing lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.055470] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 927.055667] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 927.056778] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1837e4-4b8f-434c-a7c9-dccc47d18d50 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.065543] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 927.065773] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-baed25e0-f21c-4361-a07d-129473307d44 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.097898] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 927.097898] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 927.097898] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Deleting the datastore file [datastore2] dc8b5106-5657-409b-b425-b929c8e893d5 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.097898] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b0f4800-0bb8-4f44-96e0-6bacdcabbf26 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.105179] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Waiting for the task: (returnval){ [ 927.105179] env[61473]: value = "task-4281557" [ 927.105179] env[61473]: _type = "Task" [ 927.105179] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.114451] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Task: {'id': task-4281557, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.227201] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.227481] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.227589] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 927.227725] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 927.249149] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.249395] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.249536] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.249674] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.249779] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.249899] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.250024] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.250146] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.250260] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.250377] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 927.250494] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 927.250987] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.410773] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 927.411345] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating directory with path [datastore2] vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.411650] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30bbe3d5-5213-479e-a04d-1566a00f1767 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.424525] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created directory with path [datastore2] vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.424776] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Fetch image to [datastore2] vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 927.424964] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None 
req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 927.425746] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b51d69-4522-410d-873e-f7eacd31bd61 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.433187] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c086f7a7-d244-472b-94d8-63c73c381a58 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.448151] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23128bee-456f-4aa6-8a23-27d97685ccdb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.485573] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a326525a-ab6b-41ce-ae14-6bdd103eff48 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.493043] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ca6dabe3-db75-4b51-be0c-6c409abca657 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.518928] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 927.599930] env[61473]: DEBUG oslo_vmware.rw_handles [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 927.670983] env[61473]: DEBUG oslo_vmware.rw_handles [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Completed reading data from the image iterator. 
{{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 927.671193] env[61473]: DEBUG oslo_vmware.rw_handles [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 927.676712] env[61473]: DEBUG oslo_vmware.api [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Task: {'id': task-4281557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037619} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.676990] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.677145] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 927.677336] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 927.677568] env[61473]: INFO nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Took 0.62 seconds to destroy the instance on the hypervisor. [ 927.677739] env[61473]: DEBUG oslo.service.loopingcall [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.677942] env[61473]: DEBUG nova.compute.manager [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Skipping network deallocation for instance since networking was not requested.
{{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 927.680155] env[61473]: DEBUG nova.compute.claims [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 927.680320] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.680551] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.144641] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71a79d4-789e-4afa-9422-54e6e0c498c0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.152477] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d3839a-84db-4b11-9367-cafc6290dfcc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.183616] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc7fd21-e759-4b6d-9ee3-b7d510b82052 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.191732] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c22a15-627c-4ff1-865d-bef40a2bcdf3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.205817] env[61473]: DEBUG nova.compute.provider_tree [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.214426] env[61473]: DEBUG nova.scheduler.client.report [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.232558] env[61473]: DEBUG 
oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.552s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.233125] env[61473]: ERROR nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.233125] env[61473]: Faults: ['InvalidArgument'] [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Traceback (most recent call last): [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self.driver.spawn(context, instance, image_meta, [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self._fetch_image_if_missing(context, vi) [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] image_cache(vi, tmp_image_ds_loc) [ 928.233125] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] vm_util.copy_virtual_disk( [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] session._wait_for_task(vmdk_copy_task) [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] return self.wait_for_task(task_ref) [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: 
dc8b5106-5657-409b-b425-b929c8e893d5] return evt.wait() [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] result = hub.switch() [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] return self.greenlet.switch() [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 928.233535] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] self.f(*self.args, **self.kw) [ 928.233881] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 928.233881] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] raise exceptions.translate_fault(task_info.error) [ 928.233881] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.233881] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Faults: ['InvalidArgument'] [ 928.233881] env[61473]: ERROR nova.compute.manager [instance: dc8b5106-5657-409b-b425-b929c8e893d5] [ 928.233881] env[61473]: DEBUG nova.compute.utils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 928.238474] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Build of instance dc8b5106-5657-409b-b425-b929c8e893d5 was re-scheduled: A specified parameter was not correct: fileType [ 928.238474] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 928.238866] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 928.239108] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.239321] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquired lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.240254] env[61473]: DEBUG nova.network.neutron [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 928.284371] env[61473]: DEBUG nova.network.neutron [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.390206] env[61473]: DEBUG nova.network.neutron [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.403714] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Releasing lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.405185] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 928.405493] env[61473]: DEBUG nova.compute.manager [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 928.531014] env[61473]: INFO nova.scheduler.client.report [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Deleted allocations for instance dc8b5106-5657-409b-b425-b929c8e893d5 [ 928.553387] env[61473]: DEBUG oslo_concurrency.lockutils [None req-96dd78d4-2ca6-40e2-9b11-be4ecff99745 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "dc8b5106-5657-409b-b425-b929c8e893d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 247.359s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.554461] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "dc8b5106-5657-409b-b425-b929c8e893d5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 246.358s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.555427] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] During sync_power_state the instance has a pending task (spawning). Skip. [ 928.555427] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "dc8b5106-5657-409b-b425-b929c8e893d5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.555427] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "dc8b5106-5657-409b-b425-b929c8e893d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 49.086s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.555662] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "dc8b5106-5657-409b-b425-b929c8e893d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.555882] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "dc8b5106-5657-409b-b425-b929c8e893d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.556009] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "dc8b5106-5657-409b-b425-b929c8e893d5-events" "released" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.560175] env[61473]: INFO nova.compute.manager [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Terminating instance [ 928.564602] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquiring lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.564602] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Acquired lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.564602] env[61473]: DEBUG nova.network.neutron [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 928.592875] env[61473]: DEBUG nova.compute.manager [None req-20e25c58-3ea2-4b25-a762-2db8467f13d9 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] [instance: 0a233986-dc15-431b-bf1b-58bbb14c9965] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 928.627837] env[61473]: DEBUG nova.network.neutron [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.649999] env[61473]: DEBUG nova.compute.manager [None req-20e25c58-3ea2-4b25-a762-2db8467f13d9 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] [instance: 0a233986-dc15-431b-bf1b-58bbb14c9965] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 928.680264] env[61473]: DEBUG oslo_concurrency.lockutils [None req-20e25c58-3ea2-4b25-a762-2db8467f13d9 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] Lock "0a233986-dc15-431b-bf1b-58bbb14c9965" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.455s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.693210] env[61473]: DEBUG nova.compute.manager [None req-441dffd4-24ef-420a-a5dd-bc062b9e0196 tempest-AttachInterfacesUnderV243Test-235916781 tempest-AttachInterfacesUnderV243Test-235916781-project-member] [instance: ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 928.726466] env[61473]: DEBUG nova.compute.manager [None req-441dffd4-24ef-420a-a5dd-bc062b9e0196 tempest-AttachInterfacesUnderV243Test-235916781 tempest-AttachInterfacesUnderV243Test-235916781-project-member] [instance: ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 928.745610] env[61473]: DEBUG nova.network.neutron [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.753754] env[61473]: DEBUG oslo_concurrency.lockutils [None req-441dffd4-24ef-420a-a5dd-bc062b9e0196 tempest-AttachInterfacesUnderV243Test-235916781 tempest-AttachInterfacesUnderV243Test-235916781-project-member] Lock "ca5bf1d1-8ef4-44da-bc74-ff0dfb0fac80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 229.389s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.757391] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Releasing lock "refresh_cache-dc8b5106-5657-409b-b425-b929c8e893d5" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.757793] env[61473]: DEBUG nova.compute.manager [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 928.757979] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 928.758495] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfdf002d-191d-4973-85e7-cc2e2b98810f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.763621] env[61473]: DEBUG nova.compute.manager [None req-422c6e8c-7a92-4808-a265-994264170469 tempest-ServersWithSpecificFlavorTestJSON-1210934305 tempest-ServersWithSpecificFlavorTestJSON-1210934305-project-member] [instance: 28b6c493-6d01-475d-818a-93540528a3f4] Starting instance...
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 928.775028] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b043f2b2-c4f5-4a94-a188-59fee6833fad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.807729] env[61473]: DEBUG nova.compute.manager [None req-422c6e8c-7a92-4808-a265-994264170469 tempest-ServersWithSpecificFlavorTestJSON-1210934305 tempest-ServersWithSpecificFlavorTestJSON-1210934305-project-member] [instance: 28b6c493-6d01-475d-818a-93540528a3f4] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 928.819586] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dc8b5106-5657-409b-b425-b929c8e893d5 could not be found. [ 928.819586] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 928.819752] env[61473]: INFO nova.compute.manager [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Took 0.06 seconds to destroy the instance on the hypervisor. [ 928.819889] env[61473]: DEBUG oslo.service.loopingcall [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.820878] env[61473]: DEBUG nova.compute.manager [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 928.820978] env[61473]: DEBUG nova.network.neutron [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 928.834548] env[61473]: DEBUG oslo_concurrency.lockutils [None req-422c6e8c-7a92-4808-a265-994264170469 tempest-ServersWithSpecificFlavorTestJSON-1210934305 tempest-ServersWithSpecificFlavorTestJSON-1210934305-project-member] Lock "28b6c493-6d01-475d-818a-93540528a3f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 229.147s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.847382] env[61473]: DEBUG nova.network.neutron [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Instance cache missing network info.
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.850821] env[61473]: DEBUG nova.compute.manager [None req-15f78cc6-f235-401f-a824-1a4abf72c058 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] [instance: f1938744-dd32-4992-9cf9-53d81491e4a7] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 928.856545] env[61473]: DEBUG nova.network.neutron [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.868259] env[61473]: INFO nova.compute.manager [-] [instance: dc8b5106-5657-409b-b425-b929c8e893d5] Took 0.05 seconds to deallocate network for instance. [ 928.887363] env[61473]: DEBUG nova.compute.manager [None req-15f78cc6-f235-401f-a824-1a4abf72c058 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] [instance: f1938744-dd32-4992-9cf9-53d81491e4a7] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 928.921911] env[61473]: DEBUG oslo_concurrency.lockutils [None req-15f78cc6-f235-401f-a824-1a4abf72c058 tempest-ListImageFiltersTestJSON-565567774 tempest-ListImageFiltersTestJSON-565567774-project-member] Lock "f1938744-dd32-4992-9cf9-53d81491e4a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.819s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.958284] env[61473]: DEBUG nova.compute.manager [None req-0748c056-d9a2-43ae-ba7a-b91024690631 tempest-InstanceActionsV221TestJSON-2016883952 tempest-InstanceActionsV221TestJSON-2016883952-project-member] [instance: e6071ce1-467f-4082-b885-adb4555634d2] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 928.989033] env[61473]: DEBUG nova.compute.manager [None req-0748c056-d9a2-43ae-ba7a-b91024690631 tempest-InstanceActionsV221TestJSON-2016883952 tempest-InstanceActionsV221TestJSON-2016883952-project-member] [instance: e6071ce1-467f-4082-b885-adb4555634d2] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.019163] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0748c056-d9a2-43ae-ba7a-b91024690631 tempest-InstanceActionsV221TestJSON-2016883952 tempest-InstanceActionsV221TestJSON-2016883952-project-member] Lock "e6071ce1-467f-4082-b885-adb4555634d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.277s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.034171] env[61473]: DEBUG nova.compute.manager [None req-9b5cf1e7-09b4-4a62-b3e6-cb223110eeab tempest-ImagesOneServerNegativeTestJSON-902804932 tempest-ImagesOneServerNegativeTestJSON-902804932-project-member] [instance: 23c4d824-ec68-42ad-b50d-ee33d8c833a8] Starting instance...
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.042024] env[61473]: DEBUG oslo_concurrency.lockutils [None req-42668428-ea0e-4113-b720-89d644e031a9 tempest-ServerDiagnosticsV248Test-926135278 tempest-ServerDiagnosticsV248Test-926135278-project-member] Lock "dc8b5106-5657-409b-b425-b929c8e893d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.486s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.076472] env[61473]: DEBUG nova.compute.manager [None req-9b5cf1e7-09b4-4a62-b3e6-cb223110eeab tempest-ImagesOneServerNegativeTestJSON-902804932 tempest-ImagesOneServerNegativeTestJSON-902804932-project-member] [instance: 23c4d824-ec68-42ad-b50d-ee33d8c833a8] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.103492] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9b5cf1e7-09b4-4a62-b3e6-cb223110eeab tempest-ImagesOneServerNegativeTestJSON-902804932 tempest-ImagesOneServerNegativeTestJSON-902804932-project-member] Lock "23c4d824-ec68-42ad-b50d-ee33d8c833a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 223.855s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.116383] env[61473]: DEBUG nova.compute.manager [None req-b81bdafd-ec66-46de-9ef1-97c365d98593 tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] [instance: f5079713-8e14-41b8-84db-2b599f6e136e] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.146518] env[61473]: DEBUG nova.compute.manager [None req-b81bdafd-ec66-46de-9ef1-97c365d98593 tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] [instance: f5079713-8e14-41b8-84db-2b599f6e136e] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.176518] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b81bdafd-ec66-46de-9ef1-97c365d98593 tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] Lock "f5079713-8e14-41b8-84db-2b599f6e136e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.997s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.187738] env[61473]: DEBUG nova.compute.manager [None req-a11e4869-78a5-4f81-970e-ea8041232616 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] [instance: 72a6972e-a183-4629-ba23-08135882ea29] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.213377] env[61473]: DEBUG nova.compute.manager [None req-a11e4869-78a5-4f81-970e-ea8041232616 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] [instance: 72a6972e-a183-4629-ba23-08135882ea29] Instance disappeared before build.
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.261803] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a11e4869-78a5-4f81-970e-ea8041232616 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Lock "72a6972e-a183-4629-ba23-08135882ea29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.681s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.276013] env[61473]: DEBUG nova.compute.manager [None req-639b609c-075e-4de1-9107-97fff389e406 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] [instance: 2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.313647] env[61473]: DEBUG nova.compute.manager [None req-639b609c-075e-4de1-9107-97fff389e406 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] [instance: 2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.342554] env[61473]: DEBUG oslo_concurrency.lockutils [None req-639b609c-075e-4de1-9107-97fff389e406 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Lock "2033c40a-eea5-4a1f-ab50-56f6aa0c4c9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 216.986s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.353442] env[61473]: DEBUG nova.compute.manager [None req-87aff8fb-562b-4871-b615-9eb74fd41ee8 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] [instance: 762e77db-04ae-474e-8f6e-e6b8c81ecf47] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.379742] env[61473]: DEBUG nova.compute.manager [None req-87aff8fb-562b-4871-b615-9eb74fd41ee8 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] [instance: 762e77db-04ae-474e-8f6e-e6b8c81ecf47] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.415204] env[61473]: DEBUG oslo_concurrency.lockutils [None req-87aff8fb-562b-4871-b615-9eb74fd41ee8 tempest-ListServerFiltersTestJSON-1034219397 tempest-ListServerFiltersTestJSON-1034219397-project-member] Lock "762e77db-04ae-474e-8f6e-e6b8c81ecf47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.905s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.426620] env[61473]: DEBUG nova.compute.manager [None req-77c82baa-9e79-405c-bae4-57f971518b0c tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 9e023463-2573-4518-b6a4-cb1bd3bc0224] Starting instance...
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.460927] env[61473]: DEBUG nova.compute.manager [None req-77c82baa-9e79-405c-bae4-57f971518b0c tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 9e023463-2573-4518-b6a4-cb1bd3bc0224] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.488244] env[61473]: DEBUG oslo_concurrency.lockutils [None req-77c82baa-9e79-405c-bae4-57f971518b0c tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "9e023463-2573-4518-b6a4-cb1bd3bc0224" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.018s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.498614] env[61473]: DEBUG nova.compute.manager [None req-bc38ebaf-680a-4a61-99f8-c4385638579f tempest-FloatingIPsAssociationNegativeTestJSON-293916846 tempest-FloatingIPsAssociationNegativeTestJSON-293916846-project-member] [instance: e2c7e712-922a-4fc9-882d-03c425fbdf4e] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.529768] env[61473]: DEBUG nova.compute.manager [None req-bc38ebaf-680a-4a61-99f8-c4385638579f tempest-FloatingIPsAssociationNegativeTestJSON-293916846 tempest-FloatingIPsAssociationNegativeTestJSON-293916846-project-member] [instance: e2c7e712-922a-4fc9-882d-03c425fbdf4e] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 929.554993] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bc38ebaf-680a-4a61-99f8-c4385638579f tempest-FloatingIPsAssociationNegativeTestJSON-293916846 tempest-FloatingIPsAssociationNegativeTestJSON-293916846-project-member] Lock "e2c7e712-922a-4fc9-882d-03c425fbdf4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.695s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.566264] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 929.658270] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.658603] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.660595] env[61473]: INFO nova.compute.claims [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.233390] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd9c994-c7a9-470c-a281-7fd27f535ab8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.244131] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517f9465-5bd1-4c32-9cf5-36eb658f0063 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.274145] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e85e35-ec5f-40a5-9145-0eb6c731e715 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.282845] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf3e929-de81-4893-8595-bb78c2b18c4f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.296814] env[61473]: DEBUG nova.compute.provider_tree [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.311248] env[61473]: DEBUG nova.scheduler.client.report [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.331880] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.673s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.332462] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 930.383749] env[61473]: DEBUG nova.compute.utils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.385714] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 930.385911] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 930.403034] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 930.490153] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Start spawning the instance on the hypervisor. 
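Editor's note: the "Acquiring lock" / "acquired ... waited" / "released ... held" triples above are emitted by oslo.concurrency's lockutils, which nova's resource tracker uses to serialize claims on a "compute_resources" lock. A minimal sketch of the pattern that produces these lines, using only lockutils' public API; claim_resources() and its body are illustrative stand-ins, not nova code:

    # Minimal sketch of the lockutils pattern behind the lock log lines.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid, vcpus, memory_mb):
        # Everything here runs with the named lock held; lockutils logs the
        # acquire, the wait time, and the hold time on release.
        print('claiming %s vCPU / %s MB for %s' % (vcpus, memory_mb, instance_uuid))

    claim_resources('3a350a34-7728-493f-a737-7a6a3071363e', 1, 128)

The long hold times in the "disappeared before build" series (200+ seconds) reflect the same mechanism serializing per-instance build attempts under a lock named after the instance UUID.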
[ 930.522509] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 930.522781] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 930.522912] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 930.523097] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 930.528622] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 930.528622] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 930.528622] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 930.528622] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 930.528622] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 930.529087] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 930.529087] env[61473]: DEBUG nova.virt.hardware [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 930.529087] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5925c91-d554-41f8-9485-0fa805d16d35 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 930.533023] env[61473]: DEBUG nova.policy [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aed41fc16652481cb2b544cd792db1ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e0a2e7f01674740942b8185f4261d86', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}}
[ 930.541065] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef6d90c-6f4c-4f74-8da4-46d021cc9704 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 931.400354] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Successfully created port: 924608a0-0ca7-4657-b1bf-412172473bc5 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 932.170203] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "3a350a34-7728-493f-a737-7a6a3071363e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 933.325326] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Successfully updated port: 924608a0-0ca7-4657-b1bf-412172473bc5 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
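Editor's note: the nova.virt.hardware lines above walk from flavor/image limits to a list of "possible topologies". Conceptually the step enumerates (sockets, cores, threads) triples whose product equals the vCPU count, filtered by the maxima. A simplified, illustrative re-implementation (not nova's actual code):

    # Illustrative: enumerate CPU topologies with sockets*cores*threads == vcpus,
    # capped by the maximums seen in the log (65536 each when neither the
    # flavor nor the image sets a limit).
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topologies.append(VirtCPUTopology(s, c, t))
        return topologies

    # For the 1-vCPU m1.nano flavor this yields one candidate, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))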
[ 933.344966] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 933.345165] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 933.345287] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 933.428257] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 933.620036] env[61473]: DEBUG nova.compute.manager [req-307faaa9-944b-4e57-bfe5-9b43a60f15f0 req-c0a21679-fef0-4b25-bdcf-c366bedf38b7 service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Received event network-vif-plugged-924608a0-0ca7-4657-b1bf-412172473bc5 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 933.620286] env[61473]: DEBUG oslo_concurrency.lockutils [req-307faaa9-944b-4e57-bfe5-9b43a60f15f0 req-c0a21679-fef0-4b25-bdcf-c366bedf38b7 service nova] Acquiring lock "3a350a34-7728-493f-a737-7a6a3071363e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 933.621143] env[61473]: DEBUG oslo_concurrency.lockutils [req-307faaa9-944b-4e57-bfe5-9b43a60f15f0 req-c0a21679-fef0-4b25-bdcf-c366bedf38b7 service nova] Lock "3a350a34-7728-493f-a737-7a6a3071363e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 933.621143] env[61473]: DEBUG oslo_concurrency.lockutils [req-307faaa9-944b-4e57-bfe5-9b43a60f15f0 req-c0a21679-fef0-4b25-bdcf-c366bedf38b7 service nova] Lock "3a350a34-7728-493f-a737-7a6a3071363e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 933.621143] env[61473]: DEBUG nova.compute.manager [req-307faaa9-944b-4e57-bfe5-9b43a60f15f0 req-c0a21679-fef0-4b25-bdcf-c366bedf38b7 service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] No waiting events found dispatching network-vif-plugged-924608a0-0ca7-4657-b1bf-412172473bc5 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 933.621143] env[61473]: WARNING nova.compute.manager [req-307faaa9-944b-4e57-bfe5-9b43a60f15f0 req-c0a21679-fef0-4b25-bdcf-c366bedf38b7 service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Received unexpected event network-vif-plugged-924608a0-0ca7-4657-b1bf-412172473bc5 for instance with vm_state building and task_state deleting.
[ 933.972097] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Updating instance_info_cache with network_info: [{"id": "924608a0-0ca7-4657-b1bf-412172473bc5", "address": "fa:16:3e:cb:8c:d5", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924608a0-0c", "ovs_interfaceid": "924608a0-0ca7-4657-b1bf-412172473bc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 933.990143] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
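Editor's note: the instance_info_cache payload logged above is plain JSON. A short sketch pulling out the fields usually needed when debugging (port ID, MAC, fixed IPs, MTU); the literal below is trimmed from the log entry itself:

    # Sketch: extract the debugging-relevant fields from the cached
    # network_info blob logged above (literal trimmed from the entry).
    import json

    network_info = json.loads('''[{"id": "924608a0-0ca7-4657-b1bf-412172473bc5",
      "address": "fa:16:3e:cb:8c:d5",
      "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
          "ips": [{"address": "192.168.128.11", "type": "fixed"}]}],
        "meta": {"mtu": 8950}},
      "type": "ovs", "devname": "tap924608a0-0c", "active": true}]''')

    for vif in network_info:
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        print(vif['id'], vif['address'], ips, vif['network']['meta']['mtu'])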
"delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 933.990904] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:8c:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f246b87-f105-4b33-a71d-5caf8e99e074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '924608a0-0ca7-4657-b1bf-412172473bc5', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.001513] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating folder: Project (7e0a2e7f01674740942b8185f4261d86). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 934.003066] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b991ca0-12bc-4957-bae0-35b044771f03 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.018205] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created folder: Project (7e0a2e7f01674740942b8185f4261d86) in parent group-v843485. [ 934.018205] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating folder: Instances. Parent ref: group-v843530. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 934.018205] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed4d1653-6369-426f-a2fd-031642168b4f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.031114] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created folder: Instances in parent group-v843530. [ 934.031701] env[61473]: DEBUG oslo.service.loopingcall [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.031820] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 934.032090] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12ad03bb-906e-45de-9ced-d74136e341ce {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.057063] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.057063] env[61473]: value = "task-4281561" [ 934.057063] env[61473]: _type = "Task" [ 934.057063] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.067426] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281561, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.568210] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281561, 'name': CreateVM_Task, 'duration_secs': 0.353483} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.568547] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 934.571189] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.571843] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.572422] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 934.572490] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d79dd746-5e3a-4d24-a887-6929dcbde62e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.580581] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 934.580581] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52cc288a-2819-3851-9192-6d9a38274dbf" [ 934.580581] env[61473]: _type = "Task" [ 934.580581] env[61473]: } to complete. 
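Editor's note: the CreateVM_Task / "Waiting for the task" / "progress is 0%" exchange above is the standard oslo.vmware pattern: invoke an asynchronous *_Task method through the session, then block on wait_for_task, which polls the task and logs progress. A sketch using oslo.vmware's public API; the host, credentials, and the managed object references are placeholders, not values from this log:

    # Sketch of the invoke-then-wait pattern behind the CreateVM_Task lines.
    from oslo_vmware import api

    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # folder, res_pool and config_spec would be managed object references and
    # a VirtualMachineConfigSpec built elsewhere; placeholders here.
    folder = res_pool = config_spec = None

    # Kick off the asynchronous vCenter task ("Invoking Folder.CreateVM_Task").
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
                              config=config_spec, pool=res_pool)

    # Poll until done; this produces the "Waiting for the task" and
    # "progress is 0%" lines and raises a translated fault on error.
    task_info = session.wait_for_task(task)
    print(task_info.result)  # the new VM's managed object reference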
[ 934.594040] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52cc288a-2819-3851-9192-6d9a38274dbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.732992] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Acquiring lock "889ee8c2-615c-477e-8fc5-65241759dc5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 934.733320] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 935.093342] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 935.093549] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 935.093753] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 935.722985] env[61473]: DEBUG oslo_concurrency.lockutils [None req-55a4d609-802c-4ba1-83f2-43fd57ab1cc2 tempest-ServerAddressesNegativeTestJSON-253754799 tempest-ServerAddressesNegativeTestJSON-253754799-project-member] Acquiring lock "f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 935.723360] env[61473]: DEBUG oslo_concurrency.lockutils [None req-55a4d609-802c-4ba1-83f2-43fd57ab1cc2 tempest-ServerAddressesNegativeTestJSON-253754799 tempest-ServerAddressesNegativeTestJSON-253754799-project-member] Lock "f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 937.077244] env[61473]: DEBUG nova.compute.manager [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Received event network-changed-924608a0-0ca7-4657-b1bf-412172473bc5 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 937.077508] env[61473]: DEBUG nova.compute.manager [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Refreshing instance network info cache due to event network-changed-924608a0-0ca7-4657-b1bf-412172473bc5. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}}
[ 937.078067] env[61473]: DEBUG oslo_concurrency.lockutils [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] Acquiring lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 937.078228] env[61473]: DEBUG oslo_concurrency.lockutils [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] Acquired lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 937.078397] env[61473]: DEBUG nova.network.neutron [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Refreshing network info cache for port 924608a0-0ca7-4657-b1bf-412172473bc5 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 937.963239] env[61473]: DEBUG nova.network.neutron [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Updated VIF entry in instance network info cache for port 924608a0-0ca7-4657-b1bf-412172473bc5. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 937.963694] env[61473]: DEBUG nova.network.neutron [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Updating instance_info_cache with network_info: [{"id": "924608a0-0ca7-4657-b1bf-412172473bc5", "address": "fa:16:3e:cb:8c:d5", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap924608a0-0c", "ovs_interfaceid": "924608a0-0ca7-4657-b1bf-412172473bc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 937.980859] env[61473]: DEBUG oslo_concurrency.lockutils [req-d728a470-4d22-4120-9857-155c280d87bd req-8fde8b2c-a1ac-481f-8b56-682aca3e315d service nova] Releasing lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 939.828630] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "a12b01db-28b4-477d-aef2-99304505d8c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 939.828630] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "a12b01db-28b4-477d-aef2-99304505d8c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 947.801219] env[61473]: DEBUG oslo_concurrency.lockutils [None req-02a867fe-4b88-40d6-8150-7e8736df8e7d tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] Acquiring lock "7bc8115c-903f-47f2-bf6a-a9272fe0f044" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 947.801561] env[61473]: DEBUG oslo_concurrency.lockutils [None req-02a867fe-4b88-40d6-8150-7e8736df8e7d tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] Lock "7bc8115c-903f-47f2-bf6a-a9272fe0f044" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
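Editor's note: the network-vif-plugged and network-changed entries above show Neutron notifying Nova through its external-event API; the earlier "No waiting events found dispatching ..." warning means no build thread was registered to wait for that event (here because the instance was already being deleted). A toy sketch of the register-then-dispatch idea with threading.Event; nova's real implementation is nova.compute.manager.InstanceEvents, and this is only an illustration of the shape:

    # Toy register/dispatch registry mirroring the pop_instance_event lines.
    import threading

    class InstanceEvents(object):
        def __init__(self):
            self._events = {}          # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._events[(instance_uuid, event_name)] = ev
            return ev

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                ev = self._events.pop((instance_uuid, event_name), None)
            if ev is None:
                # The "Received unexpected event ..." WARNING case above.
                print('unexpected event %s for %s' % (event_name, instance_uuid))
            else:
                ev.set()

    events = InstanceEvents()
    waiter = events.prepare('3a350a34-7728-493f-a737-7a6a3071363e',
                            'network-vif-plugged-924608a0-0ca7-4657-b1bf-412172473bc5')
    events.dispatch('3a350a34-7728-493f-a737-7a6a3071363e',
                    'network-vif-plugged-924608a0-0ca7-4657-b1bf-412172473bc5')
    waiter.wait(timeout=1)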
[ 975.204196] env[61473]: WARNING oslo_vmware.rw_handles [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles     response.begin()
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 975.204196] env[61473]: ERROR oslo_vmware.rw_handles
[ 975.204818] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 975.206891] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 975.207169] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Copying Virtual Disk [datastore2] vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/ab904a2b-198b-4d15-88dd-b44578c817b3/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 975.207468] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-159ef7cc-7a72-4244-b97f-054190e50c3f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 975.216300] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){
[ 975.216300] env[61473]:     value = "task-4281571"
[ 975.216300] env[61473]:     _type = "Task"
[ 975.216300] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 975.224800] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281571, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 975.726876] env[61473]: DEBUG oslo_vmware.exceptions [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 975.727190] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 975.727803] env[61473]: ERROR nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 975.727803] env[61473]: Faults: ['InvalidArgument']
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Traceback (most recent call last):
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     yield resources
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     self.driver.spawn(context, instance, image_meta,
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     self._fetch_image_if_missing(context, vi)
[ 975.727803] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     image_cache(vi, tmp_image_ds_loc)
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     vm_util.copy_virtual_disk(
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     session._wait_for_task(vmdk_copy_task)
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     return self.wait_for_task(task_ref)
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     return evt.wait()
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     result = hub.switch()
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 975.728226] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     return self.greenlet.switch()
[ 975.728931] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 975.728931] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     self.f(*self.args, **self.kw)
[ 975.728931] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 975.728931] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]     raise exceptions.translate_fault(task_info.error)
[ 975.728931] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 975.728931] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Faults: ['InvalidArgument']
[ 975.728931] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6]
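Editor's note: the traceback above ends in wait_for_task re-raising the vCenter task error through exceptions.translate_fault(); the earlier "Fault InvalidArgument not matched" line means no specific exception class is registered for that fault name, so the generic VimFaultException surfaces with the raw fault names in its fault_list. A sketch of how a caller would see this failure; the session and managed object arguments follow the same placeholder assumptions as the earlier sketch:

    # Sketch: catching the translated fault from a CopyVirtualDisk_Task.
    from oslo_vmware import exceptions as vexc

    def copy_disk(session, disk_mgr, source, dest):
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=source, destName=dest)
        try:
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            if 'InvalidArgument' in e.fault_list:
                # The "A specified parameter was not correct: fileType" case.
                print('copy rejected by vCenter: %s' % str(e))
            raise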
[ 975.728931] env[61473]: INFO nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Terminating instance
[ 975.729759] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 975.729960] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 975.730214] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc44e66d-38b5-4f96-913c-3db8db376457 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 975.732714] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 975.732941] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 975.733712] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425c6c55-b238-447e-b3a0-e95397217037 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 975.753272] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 975.754429] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-710d1e86-6a90-436a-b535-f0d049b1d7c6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 975.756153] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 975.756331] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 975.757015] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-377e7779-29e0-4ab8-a1b0-8afec7ad0dd5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 975.762792] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){
[ 975.762792] env[61473]:     value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52753731-97c3-43ab-f2b4-d0894221d23e"
[ 975.762792] env[61473]:     _type = "Task"
[ 975.762792] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 975.772417] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52753731-97c3-43ab-f2b4-d0894221d23e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 975.828026] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 975.828288] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 975.828509] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleting the datastore file [datastore2] 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 975.828788] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73f2e5cd-0434-4d84-9430-48dfac8136ed {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 975.836444] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){
[ 975.836444] env[61473]:     value = "task-4281573"
[ 975.836444] env[61473]:     _type = "Task"
[ 975.836444] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 975.844435] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281573, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
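Editor's note: the terminate path above is unregister the VM, then delete its datastore directory; the first call is synchronous, the second is an asynchronous task that must be awaited (the DeleteDatastoreFile_Task polling lines). A sketch under the same placeholder assumptions as the earlier session example; vm_ref, file_manager, and datacenter are illustrative managed object references:

    # Sketch of the unregister-then-delete sequence in the log.
    def destroy_vm(session, vm_ref, file_manager, datacenter, ds_path):
        # "Unregistering the VM" / "Unregistered the VM"
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # "Deleting the datastore file [datastore2] 079f1dc7-..."
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager,
                                  name=ds_path, datacenter=datacenter)
        session.wait_for_task(task)  # the "progress is 0%." polling above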
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.273196] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 976.273483] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating directory with path [datastore2] vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.273645] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-053a17fb-c38f-4f7f-8578-e2de9a5dd540 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.285271] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Created directory with path [datastore2] vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.286062] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Fetch image to [datastore2] vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 976.286062] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 976.286448] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf2c1da-fee8-4104-8f2f-a28a778fbfa3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.295236] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8623fa-8128-4fdf-9490-53ea89b7ea64 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.304870] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c7ad31-cb7a-465a-9872-d254626db442 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.337271] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cafbd2-d097-487a-b989-41fd2f125f30 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.350642] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cf3667fe-36e1-4b2f-b530-ca1f796df33a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.352547] env[61473]: DEBUG oslo_vmware.api [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281573, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074608} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.352801] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.353074] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 976.353166] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 976.353348] env[61473]: INFO nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Took 0.62 seconds to destroy the instance on the hypervisor. 
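
The repeated "Waiting for the task … to complete" / "progress is 0%" pairs above come from oslo.vmware's task poller, which re-reads task state on a fixed interval until the task reaches a terminal state. Below is a minimal stdlib-only sketch of that loop; the function name, the `get_task_info` callback, and the task-info dict shape are hypothetical stand-ins, not oslo.vmware's actual API.

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=60.0):
    """Poll a vCenter-style task until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(str(info.get("error")))
        # this branch corresponds to log lines like "progress is 0%"
        print(f"Task {info['id']} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

# toy usage: a task that reports success on the second poll
states = iter([{"id": "task-4281573", "state": "running", "progress": 0},
               {"id": "task-4281573", "state": "success", "result": None}])
wait_for_task(lambda: next(states), poll_interval=0.01)
```

The log above follows the same shape: `DeleteDatastoreFile_Task` is polled at 0% and then reported completed with a `duration_secs`, after which the teardown proceeds (file deleted, contents deleted, instance destroyed).
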
[ 976.355509] env[61473]: DEBUG nova.compute.claims [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 976.355708] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.355931] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.377289] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 976.500027] env[61473]: DEBUG oslo_vmware.rw_handles [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 976.563912] env[61473]: DEBUG oslo_vmware.rw_handles [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 976.564209] env[61473]: DEBUG oslo_vmware.rw_handles [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 976.783355] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d17d7e0-b586-4c24-85d9-6b30d8105411 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.791191] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb722de5-d9d4-450d-b203-50d41d714114 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.822714] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98dd1c84-106f-4558-b1f6-cc9466d4e46d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.830555] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8f4bee-7bbb-44c1-8760-2e634610e3cd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.845709] env[61473]: DEBUG nova.compute.provider_tree [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.855115] env[61473]: DEBUG nova.scheduler.client.report [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.873502] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.517s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.874160] env[61473]: ERROR nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 976.874160] env[61473]: Faults: ['InvalidArgument'] [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Traceback (most recent call last): [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 976.874160] env[61473]: ERROR 
nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] self.driver.spawn(context, instance, image_meta, [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] self._fetch_image_if_missing(context, vi) [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] image_cache(vi, tmp_image_ds_loc) [ 976.874160] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] vm_util.copy_virtual_disk( [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] session._wait_for_task(vmdk_copy_task) [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] return self.wait_for_task(task_ref) [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] return evt.wait() [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] result = hub.switch() [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] return self.greenlet.switch() [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 976.875330] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] self.f(*self.args, **self.kw) [ 976.875739] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 976.875739] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] raise exceptions.translate_fault(task_info.error) [ 976.875739] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 976.875739] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Faults: ['InvalidArgument'] [ 976.875739] env[61473]: ERROR nova.compute.manager [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] [ 976.875739] env[61473]: DEBUG nova.compute.utils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 976.876714] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Build of instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 was re-scheduled: A specified parameter was not correct: fileType [ 976.876714] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 976.877103] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 976.877278] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 976.877448] env[61473]: DEBUG nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 976.877625] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 977.241257] env[61473]: DEBUG nova.network.neutron [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.265663] env[61473]: INFO nova.compute.manager [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Took 0.39 seconds to deallocate network for instance. [ 977.381035] env[61473]: INFO nova.scheduler.client.report [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleted allocations for instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 [ 977.416974] env[61473]: DEBUG oslo_concurrency.lockutils [None req-238ed8f4-4554-4912-8b4b-dd294ca3a37a tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 304.057s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.418923] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 295.222s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.418923] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] During sync_power_state the instance has a pending task (spawning). Skip. 
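
The ERROR traceback above bottoms out in a `VimFaultException` raised by the task poller, and the DEBUG lines that follow show Nova treating it as a retriable build failure: the resource claim is aborted, the build is re-scheduled, and the instance's network is deallocated. The following is a rough, illustrative stand-in for that control flow; the exception class and the callback names are sketches, not Nova's or oslo.vmware's actual interfaces.

```python
class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def failing_spawn():
    # the fault seen in the log: a bad fileType on CopyVirtualDisk_Task
    raise VimFaultException(["InvalidArgument"],
                            "A specified parameter was not correct: fileType")

def locked_do_build_and_run(spawn, reschedule, cleanup_networks):
    try:
        spawn()
    except VimFaultException as exc:
        # mirrors "Build of instance ... was re-scheduled: ..." followed by
        # "Deallocating network for instance"
        reschedule(reason=f"{exc} Faults: {exc.fault_list}")
        cleanup_networks()

locked_do_build_and_run(
    failing_spawn,
    reschedule=lambda reason: print("re-scheduled:", reason),
    cleanup_networks=lambda: print("deallocating network"),
)
```

Note the fault does not abort the compute service: the scheduler report client simply deletes the allocations for the failed instance, and the long-held build lock ("held 304.057s") is released so the terminate request queued behind it can proceed.
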
[ 977.418923] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.419303] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 106.044s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.419517] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.419714] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.419871] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.423019] env[61473]: INFO nova.compute.manager [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Terminating instance [ 977.425180] env[61473]: DEBUG nova.compute.manager [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 977.425239] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 977.428020] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b48dce7-9e59-499a-bda4-006e693ea35d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.428913] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 8f80e386-439c-456e-a4ad-d643de6ae1b6] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.437678] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b681fb0-7ec9-4bed-b203-a2df9d191261 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.460700] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 8f80e386-439c-456e-a4ad-d643de6ae1b6] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.472258] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 079f1dc7-232a-4e21-9b0e-9fff2d16bab6 could not be found. [ 977.472465] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 977.472640] env[61473]: INFO nova.compute.manager [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 977.472897] env[61473]: DEBUG oslo.service.loopingcall [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.473730] env[61473]: DEBUG nova.compute.manager [-] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 977.473837] env[61473]: DEBUG nova.network.neutron [-] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 977.488200] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "8f80e386-439c-456e-a4ad-d643de6ae1b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 247.889s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.499778] env[61473]: DEBUG nova.compute.manager [None req-90e8ba48-ad82-4613-8955-992f31aced7d tempest-ServerActionsTestOtherB-1298047411 tempest-ServerActionsTestOtherB-1298047411-project-member] [instance: 5a356b28-fa9a-4fe6-ab01-e5576d802e8c] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.512363] env[61473]: DEBUG nova.network.neutron [-] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.524051] env[61473]: INFO nova.compute.manager [-] [instance: 079f1dc7-232a-4e21-9b0e-9fff2d16bab6] Took 0.05 seconds to deallocate network for instance. [ 977.530011] env[61473]: DEBUG nova.compute.manager [None req-90e8ba48-ad82-4613-8955-992f31aced7d tempest-ServerActionsTestOtherB-1298047411 tempest-ServerActionsTestOtherB-1298047411-project-member] [instance: 5a356b28-fa9a-4fe6-ab01-e5576d802e8c] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.554991] env[61473]: DEBUG oslo_concurrency.lockutils [None req-90e8ba48-ad82-4613-8955-992f31aced7d tempest-ServerActionsTestOtherB-1298047411 tempest-ServerActionsTestOtherB-1298047411-project-member] Lock "5a356b28-fa9a-4fe6-ab01-e5576d802e8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 246.050s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.582921] env[61473]: DEBUG nova.compute.manager [None req-4da6b0dc-4923-481d-8362-195e9ac40035 tempest-ImagesNegativeTestJSON-255037913 tempest-ImagesNegativeTestJSON-255037913-project-member] [instance: 19550523-59ec-4891-8695-9939b1baefbc] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.617099] env[61473]: DEBUG nova.compute.manager [None req-4da6b0dc-4923-481d-8362-195e9ac40035 tempest-ImagesNegativeTestJSON-255037913 tempest-ImagesNegativeTestJSON-255037913-project-member] [instance: 19550523-59ec-4891-8695-9939b1baefbc] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.648954] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4da6b0dc-4923-481d-8362-195e9ac40035 tempest-ImagesNegativeTestJSON-255037913 tempest-ImagesNegativeTestJSON-255037913-project-member] Lock "19550523-59ec-4891-8695-9939b1baefbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 245.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.662675] env[61473]: DEBUG nova.compute.manager [None req-8e79e260-7d31-4026-a6c2-ab25cdd92c62 tempest-AttachInterfacesV270Test-652163780 tempest-AttachInterfacesV270Test-652163780-project-member] [instance: 0091006b-084b-40e7-8d60-a2b43acc08a9] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.669510] env[61473]: DEBUG oslo_concurrency.lockutils [None req-bada6ee8-ca4d-4b46-a1d7-2b6799ec6f71 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "079f1dc7-232a-4e21-9b0e-9fff2d16bab6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.250s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.692489] env[61473]: DEBUG nova.compute.manager [None req-8e79e260-7d31-4026-a6c2-ab25cdd92c62 tempest-AttachInterfacesV270Test-652163780 tempest-AttachInterfacesV270Test-652163780-project-member] [instance: 0091006b-084b-40e7-8d60-a2b43acc08a9] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.716044] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e79e260-7d31-4026-a6c2-ab25cdd92c62 tempest-AttachInterfacesV270Test-652163780 tempest-AttachInterfacesV270Test-652163780-project-member] Lock "0091006b-084b-40e7-8d60-a2b43acc08a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.388s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.725221] env[61473]: DEBUG nova.compute.manager [None req-82d9a436-76e8-4b8d-a827-886baff3acfd tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] [instance: 8ac20624-5031-495d-bdf0-a5a7938539a6] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.751467] env[61473]: DEBUG nova.compute.manager [None req-82d9a436-76e8-4b8d-a827-886baff3acfd tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] [instance: 8ac20624-5031-495d-bdf0-a5a7938539a6] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.772032] env[61473]: DEBUG oslo_concurrency.lockutils [None req-82d9a436-76e8-4b8d-a827-886baff3acfd tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] Lock "8ac20624-5031-495d-bdf0-a5a7938539a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.746s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.783961] env[61473]: DEBUG nova.compute.manager [None req-1b94502f-f249-4d32-b625-ca2bfb0b8053 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: 50977f71-35b5-46e5-8096-5725c8053295] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.809081] env[61473]: DEBUG nova.compute.manager [None req-1b94502f-f249-4d32-b625-ca2bfb0b8053 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: 50977f71-35b5-46e5-8096-5725c8053295] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.833379] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b94502f-f249-4d32-b625-ca2bfb0b8053 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "50977f71-35b5-46e5-8096-5725c8053295" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.578s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.846569] env[61473]: DEBUG nova.compute.manager [None req-f4ffbba0-f332-429b-b545-4216c2cadc85 tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] [instance: b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.870168] env[61473]: DEBUG nova.compute.manager [None req-f4ffbba0-f332-429b-b545-4216c2cadc85 tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] [instance: b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.895065] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f4ffbba0-f332-429b-b545-4216c2cadc85 tempest-ServerRescueNegativeTestJSON-1431149032 tempest-ServerRescueNegativeTestJSON-1431149032-project-member] Lock "b82e7ee5-fdf0-458d-8ffa-a4bdeb4fcdae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.037s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.904762] env[61473]: DEBUG nova.compute.manager [None req-e23d1f0b-e8ba-47dd-a230-a10bf782c727 tempest-VolumesAssistedSnapshotsTest-93010175 tempest-VolumesAssistedSnapshotsTest-93010175-project-member] [instance: d2946031-980f-4ee9-8818-f4d7584d4e3e] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 977.938083] env[61473]: DEBUG nova.compute.manager [None req-e23d1f0b-e8ba-47dd-a230-a10bf782c727 tempest-VolumesAssistedSnapshotsTest-93010175 tempest-VolumesAssistedSnapshotsTest-93010175-project-member] [instance: d2946031-980f-4ee9-8818-f4d7584d4e3e] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 977.967643] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23d1f0b-e8ba-47dd-a230-a10bf782c727 tempest-VolumesAssistedSnapshotsTest-93010175 tempest-VolumesAssistedSnapshotsTest-93010175-project-member] Lock "d2946031-980f-4ee9-8818-f4d7584d4e3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.064s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.977225] env[61473]: DEBUG nova.compute.manager [None req-990ec83e-5491-4cdb-be90-186c6affd0da tempest-ServersTestJSON-302007319 tempest-ServersTestJSON-302007319-project-member] [instance: b7aecc9b-4032-4e15-963e-6cc270af55f3] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 978.005591] env[61473]: DEBUG nova.compute.manager [None req-990ec83e-5491-4cdb-be90-186c6affd0da tempest-ServersTestJSON-302007319 tempest-ServersTestJSON-302007319-project-member] [instance: b7aecc9b-4032-4e15-963e-6cc270af55f3] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 978.033143] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ec83e-5491-4cdb-be90-186c6affd0da tempest-ServersTestJSON-302007319 tempest-ServersTestJSON-302007319-project-member] Lock "b7aecc9b-4032-4e15-963e-6cc270af55f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.864s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.042758] env[61473]: DEBUG nova.compute.manager [None req-61dded22-b485-46ab-943d-412d8f73c280 tempest-ServersNegativeTestMultiTenantJSON-1072421860 tempest-ServersNegativeTestMultiTenantJSON-1072421860-project-member] [instance: 77271f0d-6c43-4ecc-9211-e16c977b8531] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 978.068844] env[61473]: DEBUG nova.compute.manager [None req-61dded22-b485-46ab-943d-412d8f73c280 tempest-ServersNegativeTestMultiTenantJSON-1072421860 tempest-ServersNegativeTestMultiTenantJSON-1072421860-project-member] [instance: 77271f0d-6c43-4ecc-9211-e16c977b8531] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 978.091616] env[61473]: DEBUG oslo_concurrency.lockutils [None req-61dded22-b485-46ab-943d-412d8f73c280 tempest-ServersNegativeTestMultiTenantJSON-1072421860 tempest-ServersNegativeTestMultiTenantJSON-1072421860-project-member] Lock "77271f0d-6c43-4ecc-9211-e16c977b8531" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.491s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.104169] env[61473]: DEBUG nova.compute.manager [None req-4aa50229-b6a1-4807-8804-c8ac7cb132c8 tempest-ServerRescueTestJSONUnderV235-876916624 tempest-ServerRescueTestJSONUnderV235-876916624-project-member] [instance: f081b1bc-1d1f-4b5c-8690-e0186c1a7793] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 978.129421] env[61473]: DEBUG nova.compute.manager [None req-4aa50229-b6a1-4807-8804-c8ac7cb132c8 tempest-ServerRescueTestJSONUnderV235-876916624 tempest-ServerRescueTestJSONUnderV235-876916624-project-member] [instance: f081b1bc-1d1f-4b5c-8690-e0186c1a7793] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 978.153772] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4aa50229-b6a1-4807-8804-c8ac7cb132c8 tempest-ServerRescueTestJSONUnderV235-876916624 tempest-ServerRescueTestJSONUnderV235-876916624-project-member] Lock "f081b1bc-1d1f-4b5c-8690-e0186c1a7793" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.997s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.165031] env[61473]: DEBUG nova.compute.manager [None req-e1677eab-e1da-4b7f-9b3b-48dab26b689f tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: dd63d5b4-50a7-4dce-8e2f-3eac7d55e424] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 978.189269] env[61473]: DEBUG nova.compute.manager [None req-e1677eab-e1da-4b7f-9b3b-48dab26b689f tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] [instance: dd63d5b4-50a7-4dce-8e2f-3eac7d55e424] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 978.221319] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e1677eab-e1da-4b7f-9b3b-48dab26b689f tempest-DeleteServersAdminTestJSON-1649854680 tempest-DeleteServersAdminTestJSON-1649854680-project-member] Lock "dd63d5b4-50a7-4dce-8e2f-3eac7d55e424" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.900s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.242420] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 978.317565] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.317796] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.319393] env[61473]: INFO nova.compute.claims [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.748035] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d8d3f5-11f5-4afe-96aa-7c7e26e7c6d2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.756034] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892cd59c-81ff-430a-8b2b-8e0ea830ffe2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.787524] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece7a762-4bbf-4c20-8bf6-ea4732ad6d3b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.795934] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3058c81-9485-4fdb-ab2d-464ce7920d40 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.810707] env[61473]: DEBUG nova.compute.provider_tree [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.828824] env[61473]: DEBUG nova.scheduler.client.report [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.854894] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-33ed1588-1ada-46de-b776-567618bf47a3 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "651ebf44-9c99-41a3-b7fb-ab5914002e85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.855178] env[61473]: DEBUG oslo_concurrency.lockutils [None req-33ed1588-1ada-46de-b776-567618bf47a3 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "651ebf44-9c99-41a3-b7fb-ab5914002e85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.855694] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.538s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.856129] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 978.890873] env[61473]: DEBUG nova.compute.utils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 978.892053] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 978.892223] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 978.902830] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 978.992213] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 978.995508] env[61473]: DEBUG nova.policy [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03b09a062d8546478f768015a53f9ca2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2d10e82effd4884a8917509968438af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 979.035205] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 979.035487] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 979.035681] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.035920] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 979.036147] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.036410] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 979.036639] env[61473]: DEBUG nova.virt.hardware [None 
req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 979.036873] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 979.037148] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 979.037351] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 979.037569] env[61473]: DEBUG nova.virt.hardware [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 979.038489] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19076202-a0e5-4fe8-bc68-4ee19932670c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.048128] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4573c923-e8de-4607-b1e4-1b9bcb074a2c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.858965] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Successfully created port: 800c5a80-5479-4a82-89af-9d1bae8a091b {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.482658] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Acquiring lock "284671f0-2679-4344-86fa-4ea0f05f09bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.482959] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.541531] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Successfully updated port: 800c5a80-5479-4a82-89af-9d1bae8a091b {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 981.564222] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "refresh_cache-442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.564385] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquired lock "refresh_cache-442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.564544] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 981.644378] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.655985] env[61473]: DEBUG nova.compute.manager [req-70edbadd-eb6c-4b73-be31-f04ed52b712c req-27ff8fd9-16e5-4b3f-a208-4ceaa56397da service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Received event network-vif-plugged-800c5a80-5479-4a82-89af-9d1bae8a091b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 981.656215] env[61473]: DEBUG oslo_concurrency.lockutils [req-70edbadd-eb6c-4b73-be31-f04ed52b712c req-27ff8fd9-16e5-4b3f-a208-4ceaa56397da service nova] Acquiring lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.656416] env[61473]: DEBUG oslo_concurrency.lockutils [req-70edbadd-eb6c-4b73-be31-f04ed52b712c req-27ff8fd9-16e5-4b3f-a208-4ceaa56397da service nova] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.656584] env[61473]: DEBUG oslo_concurrency.lockutils [req-70edbadd-eb6c-4b73-be31-f04ed52b712c req-27ff8fd9-16e5-4b3f-a208-4ceaa56397da service nova] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.656746] env[61473]: DEBUG nova.compute.manager [req-70edbadd-eb6c-4b73-be31-f04ed52b712c req-27ff8fd9-16e5-4b3f-a208-4ceaa56397da service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] No waiting events found dispatching network-vif-plugged-800c5a80-5479-4a82-89af-9d1bae8a091b {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 981.656950] env[61473]: WARNING nova.compute.manager [req-70edbadd-eb6c-4b73-be31-f04ed52b712c req-27ff8fd9-16e5-4b3f-a208-4ceaa56397da service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Received unexpected event network-vif-plugged-800c5a80-5479-4a82-89af-9d1bae8a091b for instance with vm_state building and task_state spawning. 
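
A few entries above, nova.virt.hardware walks its CPU-topology search for the m1.nano flavor: with 1 vCPU and effectively unbounded limits (65536 sockets/cores/threads each), the only (sockets, cores, threads) triple whose product equals the vCPU count is 1:1:1, which is exactly what "Got 1 possible topologies" reports. A hedged sketch of that enumeration follows; it captures the idea, not Nova's exact implementation, which additionally weighs flavor/image preferences and the "allow threads: False" restriction.

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

# matches "Build topologies for 1 vcpu(s) 1:1:1" / "Got 1 possible topologies"
print(possible_topologies(1))  # [(1, 1, 1)]
```
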
[ 982.211483] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Updating instance_info_cache with network_info: [{"id": "800c5a80-5479-4a82-89af-9d1bae8a091b", "address": "fa:16:3e:23:2b:e2", "network": {"id": "5de24945-b104-4a5c-93aa-9511a5623964", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685722926-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2d10e82effd4884a8917509968438af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap800c5a80-54", "ovs_interfaceid": "800c5a80-5479-4a82-89af-9d1bae8a091b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.227747] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Releasing lock "refresh_cache-442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.228073] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Instance network_info: |[{"id": "800c5a80-5479-4a82-89af-9d1bae8a091b", "address": "fa:16:3e:23:2b:e2", "network": {"id": "5de24945-b104-4a5c-93aa-9511a5623964", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685722926-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2d10e82effd4884a8917509968438af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap800c5a80-54", "ovs_interfaceid": "800c5a80-5479-4a82-89af-9d1bae8a091b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 982.228504] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None 
req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:2b:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '800c5a80-5479-4a82-89af-9d1bae8a091b', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.236789] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Creating folder: Project (e2d10e82effd4884a8917509968438af). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 982.237397] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97916900-e532-4c47-9ed0-3751285d8000 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.258337] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Created folder: Project (e2d10e82effd4884a8917509968438af) in parent group-v843485. [ 982.258642] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Creating folder: Instances. Parent ref: group-v843537. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 982.259448] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aebece29-f202-4a35-b01d-e000df324aa7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.273134] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Created folder: Instances in parent group-v843537. [ 982.273404] env[61473]: DEBUG oslo.service.loopingcall [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.273600] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 982.273806] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae38998a-b24f-414b-9587-998b1082cb2b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.303009] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.303009] env[61473]: value = "task-4281577" [ 982.303009] env[61473]: _type = "Task" [ 982.303009] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.311596] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281577, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.813942] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281577, 'name': CreateVM_Task, 'duration_secs': 0.374404} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.814267] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 982.814799] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.814988] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.815288] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 982.815594] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaedf7cf-b5b5-47a0-8be1-0cfd50ecff29 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.820598] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Waiting for the task: (returnval){ [ 982.820598] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52005cca-bdab-263d-980f-eaa25f0f3330" [ 982.820598] env[61473]: _type = "Task" [ 982.820598] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.829082] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52005cca-bdab-263d-980f-eaa25f0f3330, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.331900] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.331900] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.331900] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.842828] env[61473]: DEBUG nova.compute.manager [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Received event network-changed-800c5a80-5479-4a82-89af-9d1bae8a091b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 983.843130] env[61473]: DEBUG nova.compute.manager [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Refreshing instance network info cache due to event network-changed-800c5a80-5479-4a82-89af-9d1bae8a091b. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 983.843294] env[61473]: DEBUG oslo_concurrency.lockutils [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] Acquiring lock "refresh_cache-442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.843438] env[61473]: DEBUG oslo_concurrency.lockutils [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] Acquired lock "refresh_cache-442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.843598] env[61473]: DEBUG nova.network.neutron [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Refreshing network info cache for port 800c5a80-5479-4a82-89af-9d1bae8a091b {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 984.231613] env[61473]: DEBUG nova.network.neutron [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Updated VIF entry in instance network info cache for port 800c5a80-5479-4a82-89af-9d1bae8a091b. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 984.231613] env[61473]: DEBUG nova.network.neutron [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Updating instance_info_cache with network_info: [{"id": "800c5a80-5479-4a82-89af-9d1bae8a091b", "address": "fa:16:3e:23:2b:e2", "network": {"id": "5de24945-b104-4a5c-93aa-9511a5623964", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685722926-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2d10e82effd4884a8917509968438af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap800c5a80-54", "ovs_interfaceid": "800c5a80-5479-4a82-89af-9d1bae8a091b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.260091] env[61473]: DEBUG oslo_concurrency.lockutils [req-542bddf5-6633-45b2-803b-aadceb5fc090 req-21488517-3c27-470c-b47a-aa090051b432 service nova] Releasing lock "refresh_cache-442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.823154] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d14ec99-cc44-46c8-a08f-f5aa2d9eb390 tempest-ServerActionsTestOtherA-1494680182 tempest-ServerActionsTestOtherA-1494680182-project-member] Acquiring lock "55ef17e9-54f8-429e-91bb-22a9be430200" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.823386] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d14ec99-cc44-46c8-a08f-f5aa2d9eb390 tempest-ServerActionsTestOtherA-1494680182 tempest-ServerActionsTestOtherA-1494680182-project-member] Lock "55ef17e9-54f8-429e-91bb-22a9be430200" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.966112] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.966437] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.966484] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.966638] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.978163] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.978428] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.978661] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.978869] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 984.980009] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffbce0c-b32b-4185-8986-2f7c2880a619 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.989332] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b80e211-764b-4812-ac69-7fc7c21591d1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.004416] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69566e70-e103-423d-8343-725f9c9d57c4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.010951] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47a16e6-820e-496b-9faa-49f771605b8c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.039157] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180638MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 985.039324] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.039518] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.112050] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 5d67907c-7199-4734-a5cc-4466703eaa51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.112239] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.112371] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.112494] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.112610] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.112726] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.112838] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.112949] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.113072] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.113183] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 985.125920] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.137242] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.152080] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 95e4c8b2-41c9-4882-a5bf-0b4a7b14e726 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.165038] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 80de92ab-332a-4e1f-8cd0-61cbaa791e06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.179061] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7bce47a9-edeb-4ecb-b946-c29b2f360ac4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.190513] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c5efb3fe-7432-4daf-9fff-518781b8f435 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.201121] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance fc93905f-f07a-4735-9297-2dbc1e2b0066 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.214128] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9db4fe27-6702-41ec-b2c3-813918bbdb56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.226381] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 998bff91-85c3-4f70-8056-2e77a0d80f07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.237067] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 889ee8c2-615c-477e-8fc5-65241759dc5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.247449] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.257307] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.267384] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7bc8115c-903f-47f2-bf6a-a9272fe0f044 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.277633] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 651ebf44-9c99-41a3-b7fb-ab5914002e85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.287630] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 284671f0-2679-4344-86fa-4ea0f05f09bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.298015] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 55ef17e9-54f8-429e-91bb-22a9be430200 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 985.298015] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 985.298015] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 985.598871] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbeead8-3dbe-4e2f-8221-0f01ce4fa1c1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.607431] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ed4e02-db13-484f-9dd6-5b09dc0fa39e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.638508] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b77ddac-728a-4136-9ac5-b7f2f8c24d9c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.647060] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62195dfa-cbf1-49da-9b32-8c7de3e2acaa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.661200] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.672372] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 985.688489] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 985.688638] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.649s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.684588] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.709946] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.710161] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.710303] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 986.986835] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.965742] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.966061] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 988.966061] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 988.986839] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.986990] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987068] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987190] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987314] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987432] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987551] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987668] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987795] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.987911] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 988.988039] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 988.988510] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1025.226868] env[61473]: WARNING oslo_vmware.rw_handles [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1025.226868] env[61473]: ERROR oslo_vmware.rw_handles [ 1025.227374] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1025.229209] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1025.229479] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Copying Virtual Disk [datastore2] vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/d086b543-d9cd-4d31-87d5-10296b12986c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1025.229771] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd62b4f5-7469-4b1c-bde7-0e8f8a672ea5 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.237798] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){ [ 1025.237798] env[61473]: value = "task-4281578" [ 1025.237798] env[61473]: _type = "Task" [ 1025.237798] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.245687] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': task-4281578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.749092] env[61473]: DEBUG oslo_vmware.exceptions [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1025.749186] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.749695] env[61473]: ERROR nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1025.749695] env[61473]: Faults: ['InvalidArgument'] [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Traceback (most recent call last): [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] yield resources [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self.driver.spawn(context, instance, image_meta, [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1025.749695] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self._fetch_image_if_missing(context, vi) [ 1025.749695] 
env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] image_cache(vi, tmp_image_ds_loc) [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] vm_util.copy_virtual_disk( [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] session._wait_for_task(vmdk_copy_task) [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] return self.wait_for_task(task_ref) [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] return evt.wait() [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] result = hub.switch() [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1025.749990] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] return self.greenlet.switch() [ 1025.750291] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1025.750291] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self.f(*self.args, **self.kw) [ 1025.750291] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1025.750291] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] raise exceptions.translate_fault(task_info.error) [ 1025.750291] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1025.750291] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Faults: ['InvalidArgument'] [ 1025.750291] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] [ 1025.750291] env[61473]: INFO nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 
tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Terminating instance
[ 1025.751568] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1025.751772] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1025.752073] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2af3bfe-99c4-4c30-a3bb-926122beabf2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1025.754209] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1025.754415] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1025.755145] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6738337c-d5ae-4b13-b536-fda17327f9c1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1025.761766] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1025.761999] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc729f78-71ef-4446-b1ba-b5f0cc0ec8a2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1025.764076] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1025.764272] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1025.765207] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b9d440-ac1a-4987-a5d4-f35193801fdb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1025.769542] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Waiting for the task: (returnval){
[ 1025.769542] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]522e6b76-f23f-5fbb-d415-b6bde4109d28"
[ 1025.769542] env[61473]: _type = "Task"
[ 1025.769542] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1025.779712] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]522e6b76-f23f-5fbb-d415-b6bde4109d28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1025.830163] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1025.830401] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1025.830584] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Deleting the datastore file [datastore2] 5d67907c-7199-4734-a5cc-4466703eaa51 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1025.830848] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-154d306e-e7d8-4e4e-ae6b-72db096b70bf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1025.837253] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){
[ 1025.837253] env[61473]: value = "task-4281580"
[ 1025.837253] env[61473]: _type = "Task"
[ 1025.837253] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1025.844786] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': task-4281580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1026.280390] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1026.280631] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Creating directory with path [datastore2] vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1026.280791] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9664b5c-eb95-4077-97b8-a86867c8c0ad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.292083] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Created directory with path [datastore2] vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1026.292282] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Fetch image to [datastore2] vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1026.292455] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1026.293202] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50b09ef-57c7-441e-9bcb-f1232a80da51 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.299644] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ff5cc3-d0f8-4731-a6cc-3ecfd2b529e8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.309475] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc03f4d3-ce88-4e92-9836-b166d09a4719 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.342420] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9359d203-b63b-475a-bb05-72d7bbd38bb7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.349766] env[61473]: DEBUG oslo_vmware.api [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': task-4281580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071996} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1026.350875] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1026.351069] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1026.351241] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1026.351967] env[61473]: INFO nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1026.353226] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-610e26b2-43c4-4b99-b24d-60d598810c99 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.355107] env[61473]: DEBUG nova.compute.claims [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1026.355367] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1026.355587] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1026.389175] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1026.446218] env[61473]: DEBUG oslo_vmware.rw_handles [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1026.505639] env[61473]: DEBUG oslo_vmware.rw_handles [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1026.505840] env[61473]: DEBUG oslo_vmware.rw_handles [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1026.777091] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd906c9-19f6-42b0-babb-2a61c6baa3a4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.784919] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82680bc-6f37-4f59-aa48-5a6f47b97d1f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.819020] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95ca83c-6941-421c-b934-d6e286086e32 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.823738] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71562953-0c94-4efa-9367-a2ac30c8d769 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1026.838362] env[61473]: DEBUG nova.compute.provider_tree [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1026.847574] env[61473]: DEBUG nova.scheduler.client.report [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1026.864258] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.509s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1026.864785] env[61473]: ERROR nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1026.864785] env[61473]: Faults: ['InvalidArgument']
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Traceback (most recent call last):
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self.driver.spawn(context, instance, image_meta,
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self._fetch_image_if_missing(context, vi)
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] image_cache(vi, tmp_image_ds_loc)
[ 1026.864785] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] vm_util.copy_virtual_disk(
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] session._wait_for_task(vmdk_copy_task)
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] return self.wait_for_task(task_ref)
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] return evt.wait()
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] result = hub.switch()
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] return self.greenlet.switch()
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1026.865153] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] self.f(*self.args, **self.kw)
[ 1026.865482] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1026.865482] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] raise exceptions.translate_fault(task_info.error)
[ 1026.865482] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1026.865482] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Faults: ['InvalidArgument']
[ 1026.865482] env[61473]: ERROR nova.compute.manager [instance: 5d67907c-7199-4734-a5cc-4466703eaa51]
[ 1026.865595] env[61473]: DEBUG nova.compute.utils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1026.866964] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Build of instance 5d67907c-7199-4734-a5cc-4466703eaa51 was re-scheduled: A specified parameter was not correct: fileType
[ 1026.866964] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}}
[ 1026.867355] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}}
[ 1026.867530] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}}
[ 1026.867683] env[61473]: DEBUG nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1026.867856] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1027.185423] env[61473]: DEBUG nova.network.neutron [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1027.197221] env[61473]: INFO nova.compute.manager [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Took 0.33 seconds to deallocate network for instance.
[ 1027.299215] env[61473]: INFO nova.scheduler.client.report [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Deleted allocations for instance 5d67907c-7199-4734-a5cc-4466703eaa51
[ 1027.324386] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8b9b2523-1081-4ece-a083-c520e3d50c2e tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "5d67907c-7199-4734-a5cc-4466703eaa51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 352.431s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1027.325599] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "5d67907c-7199-4734-a5cc-4466703eaa51" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 345.129s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1027.325794] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] During sync_power_state the instance has a pending task (spawning). Skip.
[ 1027.325971] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "5d67907c-7199-4734-a5cc-4466703eaa51" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1027.326519] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "5d67907c-7199-4734-a5cc-4466703eaa51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 155.182s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1027.326736] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "5d67907c-7199-4734-a5cc-4466703eaa51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1027.326938] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "5d67907c-7199-4734-a5cc-4466703eaa51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1027.327116] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "5d67907c-7199-4734-a5cc-4466703eaa51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1027.329098] env[61473]: INFO nova.compute.manager [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Terminating instance
[ 1027.331398] env[61473]: DEBUG nova.compute.manager [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1027.331603] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1027.332364] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37307cfd-1484-4982-bfe6-7c2a02eabe06 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1027.338126] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1027.349326] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df25e97-96fa-433a-a385-7f9fa0ea8852 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1027.380554] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5d67907c-7199-4734-a5cc-4466703eaa51 could not be found.
[ 1027.380775] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1027.380932] env[61473]: INFO nova.compute.manager [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1027.381192] env[61473]: DEBUG oslo.service.loopingcall [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1027.383587] env[61473]: DEBUG nova.compute.manager [-] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1027.383695] env[61473]: DEBUG nova.network.neutron [-] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1027.398106] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1027.398347] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1027.399958] env[61473]: INFO nova.compute.claims [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1027.412304] env[61473]: DEBUG nova.network.neutron [-] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1027.433290] env[61473]: INFO nova.compute.manager [-] [instance: 5d67907c-7199-4734-a5cc-4466703eaa51] Took 0.05 seconds to deallocate network for instance.
[ 1027.561578] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a65057a0-198f-4002-a0a4-24c8bab39d72 tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "5d67907c-7199-4734-a5cc-4466703eaa51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.234s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1027.842596] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8286595c-309e-437c-9964-7816c8cfdb01 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1027.851463] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297ded7b-9177-42da-a8f6-f5f4926e0397 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1027.884287] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d52bf2-0658-4db8-a30e-20eae0dd0d2d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1027.891967] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49116f33-f969-4868-9136-62f5e8f23a87 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1027.905704] env[61473]: DEBUG nova.compute.provider_tree [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1027.916048] env[61473]: DEBUG nova.scheduler.client.report [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1027.933918] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.535s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1027.934662] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}}
[ 1027.974151] env[61473]: DEBUG nova.compute.utils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1027.977328] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 1027.977328] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1027.988791] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}}
[ 1028.057035] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}}
[ 1028.084346] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1028.084643] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1028.084814] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1028.085000] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1028.085211] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1028.085306] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1028.085511] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1028.085668] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1028.085834] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1028.086028] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1028.086235] env[61473]: DEBUG nova.virt.hardware [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1028.087215] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f258bb6-c0c6-4e31-a5a3-02af2ab68690 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1028.091262] env[61473]: DEBUG nova.policy [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '679a463d26e64b3c8b61617fe97abf2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '930bd6995c2a4a6d8b2f760d584e21bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1028.099777] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569c2c1d-cfa4-409e-b850-fbc7360d0e98 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1028.563363] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Successfully created port: b0b821ac-2603-48ba-9367-11a2206a9f9b {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1028.843028] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1028.843028] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1029.487695] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Successfully updated port: b0b821ac-2603-48ba-9367-11a2206a9f9b {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1029.496232] env[61473]: DEBUG nova.compute.manager [req-bb3a5d9d-6077-41fb-b4a8-558d3f364298 req-3489237d-29ed-4ac6-b152-061de77dc252 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Received event network-vif-plugged-b0b821ac-2603-48ba-9367-11a2206a9f9b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1029.496470] env[61473]: DEBUG oslo_concurrency.lockutils [req-bb3a5d9d-6077-41fb-b4a8-558d3f364298 req-3489237d-29ed-4ac6-b152-061de77dc252 service nova] Acquiring lock "9031b0d9-4e07-4afa-a597-770b80df2511-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1029.496653] env[61473]: DEBUG oslo_concurrency.lockutils [req-bb3a5d9d-6077-41fb-b4a8-558d3f364298 req-3489237d-29ed-4ac6-b152-061de77dc252 service nova] Lock "9031b0d9-4e07-4afa-a597-770b80df2511-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1029.496958] env[61473]: DEBUG oslo_concurrency.lockutils [req-bb3a5d9d-6077-41fb-b4a8-558d3f364298 req-3489237d-29ed-4ac6-b152-061de77dc252 service nova] Lock "9031b0d9-4e07-4afa-a597-770b80df2511-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1029.497058] env[61473]: DEBUG nova.compute.manager [req-bb3a5d9d-6077-41fb-b4a8-558d3f364298 req-3489237d-29ed-4ac6-b152-061de77dc252 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] No waiting events found dispatching network-vif-plugged-b0b821ac-2603-48ba-9367-11a2206a9f9b {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1029.498538] env[61473]: WARNING nova.compute.manager [req-bb3a5d9d-6077-41fb-b4a8-558d3f364298 req-3489237d-29ed-4ac6-b152-061de77dc252 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Received unexpected event network-vif-plugged-b0b821ac-2603-48ba-9367-11a2206a9f9b for instance with vm_state building and task_state spawning.
[ 1029.500771] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "refresh_cache-9031b0d9-4e07-4afa-a597-770b80df2511" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1029.500913] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "refresh_cache-9031b0d9-4e07-4afa-a597-770b80df2511" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1029.501069] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1029.567839] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1029.986206] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Updating instance_info_cache with network_info: [{"id": "b0b821ac-2603-48ba-9367-11a2206a9f9b", "address": "fa:16:3e:7d:55:4c", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b821ac-26", "ovs_interfaceid": "b0b821ac-2603-48ba-9367-11a2206a9f9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1030.000894] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "refresh_cache-9031b0d9-4e07-4afa-a597-770b80df2511" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1030.001237] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Instance network_info: |[{"id": "b0b821ac-2603-48ba-9367-11a2206a9f9b", "address": "fa:16:3e:7d:55:4c", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b821ac-26", "ovs_interfaceid": "b0b821ac-2603-48ba-9367-11a2206a9f9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}}
[ 1030.001653] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:55:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0b821ac-2603-48ba-9367-11a2206a9f9b', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1030.009827] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating folder: Project (930bd6995c2a4a6d8b2f760d584e21bf). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1030.009910] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cf3ca13-c95a-4b7f-8912-6729cd443906 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1030.022921] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created folder: Project (930bd6995c2a4a6d8b2f760d584e21bf) in parent group-v843485.
[ 1030.023118] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating folder: Instances. Parent ref: group-v843540. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1030.023349] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31c5d55f-783d-4083-adff-54d06757c9c7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1030.031520] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created folder: Instances in parent group-v843540.
[ 1030.031748] env[61473]: DEBUG oslo.service.loopingcall [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1030.031925] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1030.032204] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-997e99bf-9d36-4f58-adcf-3f9c88c5f4c0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1030.051637] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1030.051637] env[61473]: value = "task-4281583"
[ 1030.051637] env[61473]: _type = "Task"
[ 1030.051637] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1030.062720] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281583, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1030.561701] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281583, 'name': CreateVM_Task, 'duration_secs': 0.300288} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1030.561875] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1030.562625] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1030.562795] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1030.563136] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1030.563383] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1484808-296a-48c0-be77-7d8a530e4b53 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1030.567660] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){
[ 1030.567660] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5271cbd6-f61c-6b91-95df-f6aea0ff3fea"
[ 1030.567660] env[61473]: _type = "Task"
[ 1030.567660] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1030.576196] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5271cbd6-f61c-6b91-95df-f6aea0ff3fea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1031.078884] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1031.078884] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1031.078884] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1031.555177] env[61473]: DEBUG nova.compute.manager [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Received event network-changed-b0b821ac-2603-48ba-9367-11a2206a9f9b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1031.555352] env[61473]: DEBUG nova.compute.manager [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Refreshing instance network info cache due to event network-changed-b0b821ac-2603-48ba-9367-11a2206a9f9b. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}}
[ 1031.555575] env[61473]: DEBUG oslo_concurrency.lockutils [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] Acquiring lock "refresh_cache-9031b0d9-4e07-4afa-a597-770b80df2511" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1031.555717] env[61473]: DEBUG oslo_concurrency.lockutils [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] Acquired lock "refresh_cache-9031b0d9-4e07-4afa-a597-770b80df2511" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1031.555875] env[61473]: DEBUG nova.network.neutron [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Refreshing network info cache for port b0b821ac-2603-48ba-9367-11a2206a9f9b {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1031.856248] env[61473]: DEBUG nova.network.neutron [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Updated VIF entry in instance network info cache for port b0b821ac-2603-48ba-9367-11a2206a9f9b. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1031.856613] env[61473]: DEBUG nova.network.neutron [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Updating instance_info_cache with network_info: [{"id": "b0b821ac-2603-48ba-9367-11a2206a9f9b", "address": "fa:16:3e:7d:55:4c", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b821ac-26", "ovs_interfaceid": "b0b821ac-2603-48ba-9367-11a2206a9f9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1031.865955] env[61473]: DEBUG oslo_concurrency.lockutils [req-ecd45c62-7600-404c-ba9b-82973777bd36 req-61d770d5-e3bb-4455-a713-fa21b420c1a8 service nova] Releasing lock "refresh_cache-9031b0d9-4e07-4afa-a597-770b80df2511" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1039.962609] env[61473]: DEBUG oslo_concurrency.lockutils [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1045.967044] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1045.967044] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1045.967044] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}}
[ 1045.967390] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1045.983185] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1045.983444] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1045.983631] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1045.983790] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1045.984938] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8c1686-d6a5-4040-bf14-7232c416c7a7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1045.993697] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54cc4cf-7877-44bc-9e8f-1a4ee014939b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1046.007508] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffefd2bd-b79d-4fb6-92ba-83e540b15a85 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1046.013845] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567ac4b1-9014-49a2-9576-543be9cd7e8c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1046.043507] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180639MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1046.043656] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[
1046.043851] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.208265] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.208390] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.208479] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.208609] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.208730] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.209749] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.209749] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.209749] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.209749] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.209923] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.221223] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.233615] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 95e4c8b2-41c9-4882-a5bf-0b4a7b14e726 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.245050] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 80de92ab-332a-4e1f-8cd0-61cbaa791e06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.255480] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7bce47a9-edeb-4ecb-b946-c29b2f360ac4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.265202] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c5efb3fe-7432-4daf-9fff-518781b8f435 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.275516] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance fc93905f-f07a-4735-9297-2dbc1e2b0066 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.285171] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9db4fe27-6702-41ec-b2c3-813918bbdb56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.298531] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 998bff91-85c3-4f70-8056-2e77a0d80f07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.309864] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 889ee8c2-615c-477e-8fc5-65241759dc5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.319668] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.329957] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.339736] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7bc8115c-903f-47f2-bf6a-a9272fe0f044 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.350218] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 651ebf44-9c99-41a3-b7fb-ab5914002e85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.361634] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 284671f0-2679-4344-86fa-4ea0f05f09bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.371905] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 55ef17e9-54f8-429e-91bb-22a9be430200 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.383752] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.384027] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1046.384206] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1046.684165] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1246beab-94cc-4ee2-9048-a6ba4e3bb8aa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.692014] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d66aa43-81d1-4964-9b54-38c050b936c7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.721970] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffbc85f-bb6b-4e69-9552-3015b9e42c68 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.729191] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc77b8e-ca8e-49b9-8b44-e74d331796b6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.742269] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.750991] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1046.774913] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1046.774913] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.731s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.775815] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.775815] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.966093] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.961909] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.965597] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.966463] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.966826] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1050.966826] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1050.998258] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.998438] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.998634] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.998705] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.998816] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.998971] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.999065] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.999185] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.999302] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.999417] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1050.999540] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1072.232787] env[61473]: WARNING oslo_vmware.rw_handles [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1072.232787] env[61473]: ERROR oslo_vmware.rw_handles [ 1072.233544] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1072.236018] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1072.236018] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Copying Virtual Disk [datastore2] vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/aebdd4cd-7e99-4834-a0d1-7fe1e00ac29d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1072.236243] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-581fdf5a-3733-4ba1-94e4-a573285cf8c5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.244491] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 
tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Waiting for the task: (returnval){ [ 1072.244491] env[61473]: value = "task-4281584" [ 1072.244491] env[61473]: _type = "Task" [ 1072.244491] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.252310] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Task: {'id': task-4281584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.755128] env[61473]: DEBUG oslo_vmware.exceptions [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1072.755435] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.755995] env[61473]: ERROR nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1072.755995] env[61473]: Faults: ['InvalidArgument'] [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Traceback (most recent call last): [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] yield resources [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self.driver.spawn(context, instance, image_meta, [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self._fetch_image_if_missing(context, vi) [ 1072.755995] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] image_cache(vi, tmp_image_ds_loc) [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] vm_util.copy_virtual_disk( [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] session._wait_for_task(vmdk_copy_task) [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] return self.wait_for_task(task_ref) [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] return evt.wait() [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] result = hub.switch() [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1072.756341] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] return self.greenlet.switch() [ 1072.756687] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1072.756687] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self.f(*self.args, **self.kw) [ 1072.756687] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1072.756687] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] raise exceptions.translate_fault(task_info.error) [ 1072.756687] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1072.756687] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Faults: ['InvalidArgument'] [ 1072.756687] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] [ 1072.756687] env[61473]: INFO nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 
921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Terminating instance [ 1072.757887] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.758113] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.758395] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6275186-03c0-4467-a759-eb44a0474a69 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.762504] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1072.762699] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1072.763477] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc977d4-1a4b-4c86-a45c-74da1467693a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.769861] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1072.770114] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea15d333-e76e-4673-b19d-0e6430102026 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.772286] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.772455] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1072.773411] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c774425-57a5-42f9-b1e4-2c367fb10869 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.778109] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 1072.778109] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5238c5b5-d253-56a8-22d0-838b66f0fe48" [ 1072.778109] env[61473]: _type = "Task" [ 1072.778109] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.786939] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5238c5b5-d253-56a8-22d0-838b66f0fe48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.865418] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1072.865634] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1072.865817] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Deleting the datastore file [datastore2] 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.866111] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5e56b46-6c67-4d62-bc99-dda3296afff9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.872683] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Waiting for the task: (returnval){ [ 1072.872683] env[61473]: value = "task-4281586" [ 1072.872683] env[61473]: _type = "Task" [ 1072.872683] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.880156] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Task: {'id': task-4281586, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.289189] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1073.289467] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating directory with path [datastore2] vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1073.289678] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16ec7ee3-4986-4cb9-b33d-5daf116158f5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.300646] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Created directory with path [datastore2] vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1073.300825] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Fetch image to [datastore2] vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1073.300992] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1073.301705] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4363cb80-2a71-4eac-9f4b-1fd9c6c1ae23 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.307984] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b67e79-fa04-4c70-a43a-9111c7506882 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.316573] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3afd6921-0fea-4f5b-beec-ae018c3b3115 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.346139] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d5f00d-5d81-4cdb-928a-a593a2fa56e0 {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.351465] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9cf5be4a-8a30-4cc3-a8a2-681c07087f46 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.373699] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1073.382921] env[61473]: DEBUG oslo_vmware.api [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Task: {'id': task-4281586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081682} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.383088] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.383302] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1073.383492] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1073.383676] env[61473]: INFO nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Took 0.62 seconds to destroy the instance on the hypervisor. 
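The records above all follow the one oslo.vmware task pattern that recurs throughout this log: a vCenter method is invoked (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, SearchDatastore_Task), the caller logs "Waiting for the task: (returnval){...}" from wait_for_task, and _poll_task reports progress until the task reaches a terminal state or raises. Below is a minimal stdlib-only sketch of that poll-until-terminal loop; TaskInfo, fetch_task_info and wait_for_task here are illustrative stand-ins for the shape of the mechanism, not the real oslo.vmware API.

    # Hypothetical stand-in for the wait_for_task/_poll_task loop visible in
    # the records above; not the oslo.vmware implementation.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str              # 'running' | 'success' | 'error'
        progress: int           # percent complete, as in "progress is 0%."
        error: str | None = None

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        """Poll a task until it succeeds or fails, then return or raise."""
        while True:
            info = fetch_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # oslo.vmware translates task_info.error into a
                # VimFaultException at this point; a plain exception
                # stands in for that here.
                raise RuntimeError(info.error)
            print(f"progress is {info.progress}%.")
            time.sleep(poll_interval)

    # Simulated task that completes on the third poll, much like
    # task-4281586 (DeleteDatastoreFile_Task) finishing in ~0.08s above.
    _states = iter([TaskInfo('running', 0), TaskInfo('running', 0),
                    TaskInfo('success', 100)])
    wait_for_task(lambda: next(_states), poll_interval=0.01)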
[ 1073.386030] env[61473]: DEBUG nova.compute.claims [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1073.386109] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.386267] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.429505] env[61473]: DEBUG oslo_vmware.rw_handles [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1073.489427] env[61473]: DEBUG oslo_vmware.rw_handles [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1073.489618] env[61473]: DEBUG oslo_vmware.rw_handles [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1073.814846] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e33e439-a6a2-4f21-9cd1-53af84ecd236 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.823227] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc90cdf-7e46-4348-91d9-e1227a0cb320 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.853424] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c062a05-6192-4f12-9d53-6b5ec1c36d89 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.861582] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2957ea01-56de-4084-81e0-97dec674fec7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.875180] env[61473]: DEBUG nova.compute.provider_tree [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.885125] env[61473]: DEBUG nova.scheduler.client.report [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1073.901179] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.515s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.901758] env[61473]: ERROR nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1073.901758] env[61473]: Faults: ['InvalidArgument'] [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Traceback (most recent call last): [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/compute/manager.py", line 
2637, in _build_and_run_instance [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self.driver.spawn(context, instance, image_meta, [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self._fetch_image_if_missing(context, vi) [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] image_cache(vi, tmp_image_ds_loc) [ 1073.901758] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] vm_util.copy_virtual_disk( [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] session._wait_for_task(vmdk_copy_task) [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] return self.wait_for_task(task_ref) [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] return evt.wait() [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] result = hub.switch() [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] return self.greenlet.switch() [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1073.902099] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] self.f(*self.args, **self.kw) [ 1073.902447] env[61473]: ERROR nova.compute.manager [instance: 
921c348d-b2ed-4a9c-b2cf-bdac15ebff67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1073.902447] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] raise exceptions.translate_fault(task_info.error) [ 1073.902447] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1073.902447] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Faults: ['InvalidArgument'] [ 1073.902447] env[61473]: ERROR nova.compute.manager [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] [ 1073.902584] env[61473]: DEBUG nova.compute.utils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1073.904245] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Build of instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 was re-scheduled: A specified parameter was not correct: fileType [ 1073.904245] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1073.904612] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1073.904784] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1073.904990] env[61473]: DEBUG nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1073.905180] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1074.262605] env[61473]: DEBUG nova.network.neutron [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.276517] env[61473]: INFO nova.compute.manager [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Took 0.37 seconds to deallocate network for instance. [ 1074.379438] env[61473]: INFO nova.scheduler.client.report [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Deleted allocations for instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 [ 1074.401964] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3c56ac89-d37b-4e3d-b3e2-0867a583fc0d tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 391.538s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.403140] env[61473]: DEBUG oslo_concurrency.lockutils [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 192.787s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.403268] env[61473]: DEBUG oslo_concurrency.lockutils [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Acquiring lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.403485] env[61473]: DEBUG oslo_concurrency.lockutils [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] 
Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.403660] env[61473]: DEBUG oslo_concurrency.lockutils [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.406257] env[61473]: INFO nova.compute.manager [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Terminating instance [ 1074.407238] env[61473]: DEBUG nova.compute.manager [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1074.407431] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1074.407891] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f215351-7926-4121-b33e-4b7a6035ce88 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.413219] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1074.419346] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63137b6-451d-43f5-8d20-0b1a32dc23c7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.451696] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 921c348d-b2ed-4a9c-b2cf-bdac15ebff67 could not be found. 
[ 1074.451907] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1074.452104] env[61473]: INFO nova.compute.manager [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1074.452397] env[61473]: DEBUG oslo.service.loopingcall [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1074.452904] env[61473]: DEBUG nova.compute.manager [-] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1074.452904] env[61473]: DEBUG nova.network.neutron [-] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1074.481232] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.481490] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.482998] env[61473]: INFO nova.compute.claims [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.494112] env[61473]: DEBUG nova.network.neutron [-] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.527918] env[61473]: INFO nova.compute.manager [-] [instance: 921c348d-b2ed-4a9c-b2cf-bdac15ebff67] Took 0.07 seconds to deallocate network for instance. 
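The claim at 1074.481490-1074.482998 above illustrates the resource-claim step: claims against "compute_resources" are serialized under a lock, and the request is checked against capacity derived from the reported inventory ((total - reserved) * allocation_ratio). The numbers below are taken from the inventory lines in this log; the lock is a plain threading.Lock for the sketch, whereas the log shows oslo.concurrency's lockutils doing this job. A sketch under those assumptions:

import threading

INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 329, "reserved": 0, "allocation_ratio": 1.0},
}

_claim_lock = threading.Lock()
_used = {"VCPU": 0, "MEMORY_MB": 0, "DISK_GB": 0}

def capacity(rc):
    inv = INVENTORY[rc]
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

def instance_claim(request):
    with _claim_lock:  # one claim at a time, like the "compute_resources" lock
        for rc, amount in request.items():
            if _used[rc] + amount > capacity(rc):
                raise RuntimeError(f"claim failed for {rc}")
        for rc, amount in request.items():
            _used[rc] += amount

# m1.nano from this run: 1 vCPU, 128 MB RAM, 1 GB root disk.
instance_claim({"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1})
print(_used)  # {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}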
[ 1074.620663] env[61473]: DEBUG oslo_concurrency.lockutils [None req-667cb977-a698-48a1-a265-72acc429c45f tempest-FloatingIPsAssociationTestJSON-1858100920 tempest-FloatingIPsAssociationTestJSON-1858100920-project-member] Lock "921c348d-b2ed-4a9c-b2cf-bdac15ebff67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.218s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.874470] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1529c290-386d-4368-a546-7ac429f8ea20 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.882362] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be57bcb7-57cb-45e8-85c0-7e437e191d79 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.915477] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a92d394-5bbd-465b-8112-98ce62aed3f6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.922619] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8550f79a-bb53-4087-9de6-37f5034b4ea8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.935992] env[61473]: DEBUG nova.compute.provider_tree [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.947400] env[61473]: DEBUG nova.scheduler.client.report [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1074.967021] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.482s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.967021] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1075.001155] env[61473]: DEBUG nova.compute.utils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.003019] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Not allocating networking since 'none' was specified. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1075.011022] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1075.084651] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1075.111947] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1075.112212] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1075.112371] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.112551] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1075.112698] env[61473]: DEBUG nova.virt.hardware [None 
req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.112847] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1075.113587] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1075.113816] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1075.114010] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1075.114194] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1075.114376] env[61473]: DEBUG nova.virt.hardware [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.115232] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a663ce-10ba-465d-9bc1-1f8f115df96c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.123899] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d0361b-0894-4c9d-8b7f-4143471b7042 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.137980] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance VIF info [] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1075.143587] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Creating folder: Project (d908cb4cbf90408aa630f887d4621d24). Parent ref: group-v843485. 
{{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1075.144061] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-890c7dff-a28a-4bb8-b55a-8f748c711625 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.154910] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Created folder: Project (d908cb4cbf90408aa630f887d4621d24) in parent group-v843485. [ 1075.154910] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Creating folder: Instances. Parent ref: group-v843543. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1075.154910] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f84e4b4-048b-4620-9dbe-f6b86d37e98a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.163240] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Created folder: Instances in parent group-v843543. [ 1075.163517] env[61473]: DEBUG oslo.service.loopingcall [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1075.164880] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1075.164880] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-049c1bac-f2b9-4e8d-8744-b5b8ebb549f0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.179422] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1075.179422] env[61473]: value = "task-4281589" [ 1075.179422] env[61473]: _type = "Task" [ 1075.179422] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.188445] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281589, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.691459] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281589, 'name': CreateVM_Task, 'duration_secs': 0.28013} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.691810] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1075.692064] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.692234] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.692701] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1075.692797] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6ffcd32-68c7-4b7f-9639-f1d0cd85c726 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.698179] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Waiting for the task: (returnval){ [ 1075.698179] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52953da6-1c65-95a3-6710-887004055404" [ 1075.698179] env[61473]: _type = "Task" [ 1075.698179] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.705039] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52953da6-1c65-95a3-6710-887004055404, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.210097] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.210097] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1076.211523] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.053523] env[61473]: DEBUG oslo_concurrency.lockutils [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "9031b0d9-4e07-4afa-a597-770b80df2511" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.127484] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.967842] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.968320] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.330649] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "7886aeef-40ea-45e5-afa4-d04ca469649e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.334797] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.892710] env[61473]: DEBUG oslo_concurrency.lockutils [None req-904fbe08-37d1-4095-a918-4c069ca2fc51 tempest-ServerExternalEventsTest-1763021243 tempest-ServerExternalEventsTest-1763021243-project-member] Acquiring lock "64a46ce2-e173-4d23-b5a0-32e28e0f068c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.893054] env[61473]: DEBUG oslo_concurrency.lockutils [None req-904fbe08-37d1-4095-a918-4c069ca2fc51 tempest-ServerExternalEventsTest-1763021243 tempest-ServerExternalEventsTest-1763021243-project-member] Lock "64a46ce2-e173-4d23-b5a0-32e28e0f068c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.428037] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df6b5c31-e662-4cf6-a209-0ebc96fbdee4 tempest-ServersListShow296Test-962093324 tempest-ServersListShow296Test-962093324-project-member] Acquiring lock "2002ff6b-8648-4ec4-be86-da7a0ee886f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.428284] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df6b5c31-e662-4cf6-a209-0ebc96fbdee4 tempest-ServersListShow296Test-962093324 tempest-ServersListShow296Test-962093324-project-member] Lock "2002ff6b-8648-4ec4-be86-da7a0ee886f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.297811] env[61473]: DEBUG oslo_concurrency.lockutils [None req-017474e5-b72d-4ba8-8ab2-954ed1627418 tempest-ServersTestManualDisk-71207237 tempest-ServersTestManualDisk-71207237-project-member] Acquiring lock "bea0e473-ff2e-453c-802a-84648b6d6c51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.298133] env[61473]: DEBUG oslo_concurrency.lockutils [None req-017474e5-b72d-4ba8-8ab2-954ed1627418 tempest-ServersTestManualDisk-71207237 tempest-ServersTestManualDisk-71207237-project-member] Lock "bea0e473-ff2e-453c-802a-84648b6d6c51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.825148] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 
tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.966682] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.967047] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1105.967047] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.979503] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.979730] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.979900] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.981064] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1105.981214] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffe5521-2b2b-4708-a491-418234c01eed {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.990359] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf8dfc8-234f-4f40-b8b7-1a0f8847a69a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.004716] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267709e2-1ce5-41a3-81c0-95128d91559c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.011327] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be4a839-d0ef-499d-842a-96c648926abd {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.040388] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180650MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1106.040552] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.040788] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.113822] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.113993] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114142] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114264] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114379] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114492] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114603] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114712] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114822] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.114932] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.126283] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 80de92ab-332a-4e1f-8cd0-61cbaa791e06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.137084] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7bce47a9-edeb-4ecb-b946-c29b2f360ac4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.147462] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c5efb3fe-7432-4daf-9fff-518781b8f435 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.157580] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance fc93905f-f07a-4735-9297-2dbc1e2b0066 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.167314] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9db4fe27-6702-41ec-b2c3-813918bbdb56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.178670] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 998bff91-85c3-4f70-8056-2e77a0d80f07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.188766] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 889ee8c2-615c-477e-8fc5-65241759dc5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.199822] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.210173] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.223982] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7bc8115c-903f-47f2-bf6a-a9272fe0f044 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.234244] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 651ebf44-9c99-41a3-b7fb-ab5914002e85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.244788] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 284671f0-2679-4344-86fa-4ea0f05f09bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.257223] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 55ef17e9-54f8-429e-91bb-22a9be430200 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.266149] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.276962] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.286230] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.297177] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64a46ce2-e173-4d23-b5a0-32e28e0f068c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.306805] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2002ff6b-8648-4ec4-be86-da7a0ee886f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.317815] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bea0e473-ff2e-453c-802a-84648b6d6c51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.318235] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1106.318235] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1106.619631] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567c7bd5-c102-403b-b012-92aa79728a98 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.627289] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa349313-8b87-4590-9c84-ae44a8f5b1f6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.656981] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b410d0-3fbd-4165-9f65-e4f7412d896a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.664086] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908fa64f-8e6d-481b-bb20-1e098929186f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.677114] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.685619] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1106.700810] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1106.700998] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.660s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.701767] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.966584] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.966667] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.965893] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.965893] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.962048] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.962512] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.986876] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.987153] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1110.987243] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1111.008942] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 
3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.009214] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.009409] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.009597] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.009870] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.010087] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.010308] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.010492] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.010669] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.010978] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1111.010978] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1111.642048] env[61473]: DEBUG oslo_concurrency.lockutils [None req-178d1e0c-ac54-4d6b-b8b9-1171daa5b7be tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] Acquiring lock "189c4110-3e1c-424e-8102-5b894fb27963" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.642048] env[61473]: DEBUG oslo_concurrency.lockutils [None req-178d1e0c-ac54-4d6b-b8b9-1171daa5b7be tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] Lock "189c4110-3e1c-424e-8102-5b894fb27963" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.074220] env[61473]: DEBUG oslo_concurrency.lockutils [None req-86cd00ee-efdf-4a05-ad79-4bdad30b8303 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Acquiring lock "4c5fd04e-7c5f-4499-a9da-852301ecd9a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.074220] env[61473]: DEBUG oslo_concurrency.lockutils [None req-86cd00ee-efdf-4a05-ad79-4bdad30b8303 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "4c5fd04e-7c5f-4499-a9da-852301ecd9a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.252375] env[61473]: WARNING oslo_vmware.rw_handles [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1122.252375] env[61473]: ERROR oslo_vmware.rw_handles [ 1122.253153] env[61473]: DEBUG nova.virt.vmwareapi.images [None 
req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1122.254941] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1122.255229] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Copying Virtual Disk [datastore2] vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/adedb504-1351-4441-a1c8-195f7b1c6542/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1122.255532] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-628c44f8-2453-4709-8357-6863bbf56b69 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.264014] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 1122.264014] env[61473]: value = "task-4281590" [ 1122.264014] env[61473]: _type = "Task" [ 1122.264014] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.277999] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': task-4281590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.774665] env[61473]: DEBUG oslo_vmware.exceptions [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1122.774964] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.775555] env[61473]: ERROR nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1122.775555] env[61473]: Faults: ['InvalidArgument'] [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Traceback (most recent call last): [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] yield resources [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self.driver.spawn(context, instance, image_meta, [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self._fetch_image_if_missing(context, vi) [ 1122.775555] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] image_cache(vi, tmp_image_ds_loc) [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] vm_util.copy_virtual_disk( [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] session._wait_for_task(vmdk_copy_task) [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] return self.wait_for_task(task_ref) [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] return evt.wait() [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] result = hub.switch() [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1122.775952] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] return self.greenlet.switch() [ 1122.776341] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1122.776341] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self.f(*self.args, **self.kw) [ 1122.776341] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1122.776341] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] raise exceptions.translate_fault(task_info.error) [ 1122.776341] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1122.776341] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Faults: ['InvalidArgument'] [ 1122.776341] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] [ 1122.776341] env[61473]: INFO nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Terminating instance [ 1122.777569] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.777674] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.778294] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] 
[instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1122.778482] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1122.778703] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b05988d-2024-4c6e-becb-edcdc3c78ff3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.781250] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0ae9aa-cfe2-4468-b1a3-0dd28069e191 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.787938] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1122.788173] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f73e405-aa28-4722-820f-97b80c709b80 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.790373] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.790550] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1122.791654] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3937e96-deab-4c53-abdb-60ef1fb165d6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.796918] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Waiting for the task: (returnval){ [ 1122.796918] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52d0ed1a-7691-597b-3f27-a4468777300b" [ 1122.796918] env[61473]: _type = "Task" [ 1122.796918] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.803683] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52d0ed1a-7691-597b-3f27-a4468777300b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.853048] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1122.853261] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1122.853499] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Deleting the datastore file [datastore2] 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.853829] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a837f478-145a-4e78-863f-5680d908ae86 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.860527] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 1122.860527] env[61473]: value = "task-4281592" [ 1122.860527] env[61473]: _type = "Task" [ 1122.860527] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.868452] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': task-4281592, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.307846] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1123.308156] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Creating directory with path [datastore2] vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.308404] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94b003a2-d436-4763-bcd7-84faaf6adbf2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.320533] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Created directory with path [datastore2] vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.321697] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Fetch image to [datastore2] vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1123.321697] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1123.322023] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb131222-bd14-41c7-b12b-47d5ff3cb58f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.329262] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890f01a2-e9cf-4751-9308-99244dd14b4a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.338955] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd29b685-6ae4-4dd7-8487-bf527b1f2df1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.373041] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4316de7f-f8d7-4aa8-b90b-b7cddfdbff52 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.380033] env[61473]: DEBUG oslo_vmware.api [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': task-4281592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079435} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.381572] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.381764] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1123.381938] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1123.382136] env[61473]: INFO nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Took 0.60 seconds to destroy the instance on the hypervisor. 
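The InvalidArgument/fileType failure traced above surfaces from oslo.vmware's task-polling loop: _poll_task reads task_info.error and raises exceptions.translate_fault(), which wait_for_task re-raises into nova's vm_util.copy_virtual_disk. A minimal sketch of issuing and polling the same CopyVirtualDisk_Task through oslo.vmware directly; the function name and paths are illustrative, and a live VMwareAPISession plus a datacenter managed-object ref are assumed rather than taken from this deployment:

    from oslo_vmware import exceptions as vexc

    def copy_sparse_disk(session, dc_ref, src_path, dst_path):
        """Start a CopyVirtualDisk_Task and block until vCenter resolves it."""
        vim = session.vim
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName=src_path,        # e.g. '[datastore2] .../tmp-sparse.vmdk'
            sourceDatacenter=dc_ref,
            destName=dst_path)
        try:
            # wait_for_task polls the task object (the "progress is 0%."
            # records above); on error it raises the translated fault, e.g.
            # VimFaultException with Faults: ['InvalidArgument'].
            return session.wait_for_task(task)
        except vexc.VimFaultException:
            # Nova lets this propagate: _build_and_run_instance then destroys
            # the half-built VM, aborts the claim, and re-schedules.
            raise
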
[ 1123.383964] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9e81d0ec-90b9-4f8b-b12a-769dad40bf95 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.385898] env[61473]: DEBUG nova.compute.claims [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1123.386085] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.386302] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.409714] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1123.474865] env[61473]: DEBUG oslo_vmware.rw_handles [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1123.538240] env[61473]: DEBUG oslo_vmware.rw_handles [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1123.538453] env[61473]: DEBUG oslo_vmware.rw_handles [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1123.824945] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4281c1dc-8278-440e-b4db-adf14571de14 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.833188] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fb9027-645e-427b-a2a5-a4924996bc4e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.864664] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d85d109-8cd9-4220-acc0-80724b4720b7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.872190] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d72916-82b0-46ca-a83a-300233c5a5f8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.886788] env[61473]: DEBUG nova.compute.provider_tree [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.897916] env[61473]: DEBUG nova.scheduler.client.report [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1123.914945] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.528s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.915574] env[61473]: ERROR nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1123.915574] env[61473]: Faults: ['InvalidArgument'] [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Traceback (most recent call last): [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1123.915574] env[61473]: ERROR nova.compute.manager 
[instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self.driver.spawn(context, instance, image_meta, [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self._fetch_image_if_missing(context, vi) [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] image_cache(vi, tmp_image_ds_loc) [ 1123.915574] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] vm_util.copy_virtual_disk( [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] session._wait_for_task(vmdk_copy_task) [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] return self.wait_for_task(task_ref) [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] return evt.wait() [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] result = hub.switch() [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] return self.greenlet.switch() [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1123.915922] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] self.f(*self.args, **self.kw) [ 1123.916258] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1123.916258] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] raise exceptions.translate_fault(task_info.error) [ 1123.916258] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1123.916258] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Faults: ['InvalidArgument'] [ 1123.916258] env[61473]: ERROR nova.compute.manager [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] [ 1123.916376] env[61473]: DEBUG nova.compute.utils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1123.918697] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Build of instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 was re-scheduled: A specified parameter was not correct: fileType [ 1123.918697] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1123.919088] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1123.919263] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1123.919806] env[61473]: DEBUG nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1123.920042] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1124.645313] env[61473]: DEBUG nova.network.neutron [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.657271] env[61473]: INFO nova.compute.manager [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Took 0.74 seconds to deallocate network for instance. [ 1124.807033] env[61473]: INFO nova.scheduler.client.report [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Deleted allocations for instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 [ 1124.827563] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4407f02c-ce4c-4ab3-8087-977d9adf2211 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 441.408s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.828317] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 42.701s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.828577] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.828834] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.829032] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.832110] env[61473]: INFO nova.compute.manager [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Terminating instance [ 1124.834492] env[61473]: DEBUG nova.compute.manager [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1124.834699] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1124.834948] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0676e9ce-9e99-4351-9bde-04e2703a9b22 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.847261] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09647bf2-a1c1-415e-8403-640138208f0e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.864170] env[61473]: DEBUG nova.compute.manager [None req-479a6f6f-3fd5-4cfd-ad35-8e2bc5fc1709 tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 95e4c8b2-41c9-4882-a5bf-0b4a7b14e726] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1124.887509] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26 could not be found. [ 1124.887718] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1124.887898] env[61473]: INFO nova.compute.manager [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Took 0.05 seconds to destroy the instance on the hypervisor. 
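The lock bookkeeping that brackets this section ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns", "Lock ... released ... :: held Ns") is emitted by the inner() wrapper that oslo.concurrency's lockutils.synchronized() places around the decorated function, as the lockutils.py:402/407/421 source locations show: 'waited' is time spent blocked on acquisition, 'held' is time spent inside the critical section. A minimal sketch of the same pattern, with an illustrative function body:

    from oslo_concurrency import lockutils

    # Decorating with a lock name used above produces the same three DEBUG
    # records per call (acquiring / acquired+waited / released+held).
    @lockutils.synchronized('compute_resources')
    def update_tracker():
        pass  # stand-in for the resource-tracker critical section

    update_tracker()
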
[ 1124.888179] env[61473]: DEBUG oslo.service.loopingcall [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1124.888413] env[61473]: DEBUG nova.compute.manager [-] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1124.888523] env[61473]: DEBUG nova.network.neutron [-] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1124.892910] env[61473]: DEBUG nova.compute.manager [None req-479a6f6f-3fd5-4cfd-ad35-8e2bc5fc1709 tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 95e4c8b2-41c9-4882-a5bf-0b4a7b14e726] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1124.929922] env[61473]: DEBUG oslo_concurrency.lockutils [None req-479a6f6f-3fd5-4cfd-ad35-8e2bc5fc1709 tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "95e4c8b2-41c9-4882-a5bf-0b4a7b14e726" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.773s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.939284] env[61473]: DEBUG nova.network.neutron [-] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.943754] env[61473]: DEBUG nova.compute.manager [None req-82bcfb07-86a2-4064-987a-d284ffe93dd5 tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] [instance: 80de92ab-332a-4e1f-8cd0-61cbaa791e06] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1124.955328] env[61473]: INFO nova.compute.manager [-] [instance: 3320aaf1-c6a6-4c8a-8aea-bd14e0081c26] Took 0.07 seconds to deallocate network for instance. [ 1124.974822] env[61473]: DEBUG nova.compute.manager [None req-82bcfb07-86a2-4064-987a-d284ffe93dd5 tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] [instance: 80de92ab-332a-4e1f-8cd0-61cbaa791e06] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1125.004457] env[61473]: DEBUG oslo_concurrency.lockutils [None req-82bcfb07-86a2-4064-987a-d284ffe93dd5 tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] Lock "80de92ab-332a-4e1f-8cd0-61cbaa791e06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.261s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.036256] env[61473]: DEBUG nova.compute.manager [None req-1095ce42-2e43-4064-85a1-0a9b9f2dbbb5 tempest-ServersAaction247Test-827318745 tempest-ServersAaction247Test-827318745-project-member] [instance: 7bce47a9-edeb-4ecb-b946-c29b2f360ac4] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1125.095811] env[61473]: DEBUG nova.compute.manager [None req-1095ce42-2e43-4064-85a1-0a9b9f2dbbb5 tempest-ServersAaction247Test-827318745 tempest-ServersAaction247Test-827318745-project-member] [instance: 7bce47a9-edeb-4ecb-b946-c29b2f360ac4] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1125.141508] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1095ce42-2e43-4064-85a1-0a9b9f2dbbb5 tempest-ServersAaction247Test-827318745 tempest-ServersAaction247Test-827318745-project-member] Lock "7bce47a9-edeb-4ecb-b946-c29b2f360ac4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.365s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.156528] env[61473]: DEBUG nova.compute.manager [None req-7a313f71-36c0-41db-b339-79371e280ecf tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] [instance: c5efb3fe-7432-4daf-9fff-518781b8f435] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1125.161231] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4883904b-9441-4d73-9a7f-4455a1b8ab35 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "3320aaf1-c6a6-4c8a-8aea-bd14e0081c26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.331s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.197534] env[61473]: DEBUG nova.compute.manager [None req-7a313f71-36c0-41db-b339-79371e280ecf tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] [instance: c5efb3fe-7432-4daf-9fff-518781b8f435] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1125.225420] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7a313f71-36c0-41db-b339-79371e280ecf tempest-SecurityGroupsTestJSON-1315608171 tempest-SecurityGroupsTestJSON-1315608171-project-member] Lock "c5efb3fe-7432-4daf-9fff-518781b8f435" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.254s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.236214] env[61473]: DEBUG nova.compute.manager [None req-9370e05d-b156-414b-b0ce-1be355a39b10 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: fc93905f-f07a-4735-9297-2dbc1e2b0066] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1125.267691] env[61473]: DEBUG nova.compute.manager [None req-9370e05d-b156-414b-b0ce-1be355a39b10 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: fc93905f-f07a-4735-9297-2dbc1e2b0066] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1125.300074] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9370e05d-b156-414b-b0ce-1be355a39b10 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "fc93905f-f07a-4735-9297-2dbc1e2b0066" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.709s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.314562] env[61473]: DEBUG nova.compute.manager [None req-a6565c74-f523-447f-8828-aa5b82650a55 tempest-ServerRescueTestJSON-1102761856 tempest-ServerRescueTestJSON-1102761856-project-member] [instance: 9db4fe27-6702-41ec-b2c3-813918bbdb56] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1125.356010] env[61473]: DEBUG nova.compute.manager [None req-a6565c74-f523-447f-8828-aa5b82650a55 tempest-ServerRescueTestJSON-1102761856 tempest-ServerRescueTestJSON-1102761856-project-member] [instance: 9db4fe27-6702-41ec-b2c3-813918bbdb56] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1125.387765] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a6565c74-f523-447f-8828-aa5b82650a55 tempest-ServerRescueTestJSON-1102761856 tempest-ServerRescueTestJSON-1102761856-project-member] Lock "9db4fe27-6702-41ec-b2c3-813918bbdb56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.451s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.400690] env[61473]: DEBUG nova.compute.manager [None req-0dadf6f4-5e3f-48b7-b284-16424aebaed1 tempest-ServerGroupTestJSON-969771004 tempest-ServerGroupTestJSON-969771004-project-member] [instance: 998bff91-85c3-4f70-8056-2e77a0d80f07] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1125.436961] env[61473]: DEBUG nova.compute.manager [None req-0dadf6f4-5e3f-48b7-b284-16424aebaed1 tempest-ServerGroupTestJSON-969771004 tempest-ServerGroupTestJSON-969771004-project-member] [instance: 998bff91-85c3-4f70-8056-2e77a0d80f07] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1125.466882] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0dadf6f4-5e3f-48b7-b284-16424aebaed1 tempest-ServerGroupTestJSON-969771004 tempest-ServerGroupTestJSON-969771004-project-member] Lock "998bff91-85c3-4f70-8056-2e77a0d80f07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.117s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.485599] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1125.560596] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.561391] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.563018] env[61473]: INFO nova.compute.claims [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.019789] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84a9e0a-5d15-49a0-abdf-6c7773bddd96 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.028952] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b7b093-6fb3-4a44-b83f-5be06465645a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.062967] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8917eea-1522-410f-a279-588cc1a24e58 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.070996] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5834281f-1e88-4162-b3aa-7286dfac8d8d {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.084363] env[61473]: DEBUG nova.compute.provider_tree [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.099143] env[61473]: DEBUG nova.scheduler.client.report [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1126.124127] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.563s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.127698] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1126.167196] env[61473]: DEBUG nova.compute.utils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1126.169324] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1126.169527] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1126.179438] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Start building block device mappings for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1126.214967] env[61473]: INFO nova.virt.block_device [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Booting with volume 1666cc3b-8d9d-4ea8-914a-40fed1b8998f at /dev/sda [ 1126.263309] env[61473]: DEBUG nova.policy [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d02ccdd53254bc5a3f7360796e20daf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '789a379293bf4b8c8feeea856503f0cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1126.271119] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5858f022-ed7d-4a2a-ac06-82dfa28cd969 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.283555] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0234e478-ae84-411e-aea8-dbad48af7a44 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.316718] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8351131-af45-4eac-b18f-6fed6e1e8afd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.325121] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fecca51-e114-4cfc-a781-cffc4d3c58be {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.360093] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df3354e-9c46-47ef-a1f5-b123f6ac5fbd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.367943] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4976067f-a418-4d08-9fea-343fc13b3a3b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.383852] env[61473]: DEBUG nova.virt.block_device [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Updating existing volume attachment record: 25adcb4d-74f8-4896-aa8b-fd9a18275ec5 {{(pid=61473) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 1126.746434] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1126.747034] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1126.747179] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1126.747569] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.747569] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1126.747636] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.747743] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1126.747957] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1126.748130] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1126.748306] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 
tempest-ServerActionsV293TestJSON-1547599870-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1126.748557] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1126.748774] env[61473]: DEBUG nova.virt.hardware [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1126.749936] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6e852d-9335-4eae-915c-bb1af0787b91 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.760796] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68a26a6-88ed-4525-8473-2e86170e7974 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.882932] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Successfully created port: 44bf01d8-c683-4726-a747-14f6cf045a75 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.221194] env[61473]: DEBUG nova.compute.manager [req-edb66640-eec8-4ddd-a2de-eb9e9a6ff07f req-92d9b361-9c8f-449c-8031-f44ebceae254 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Received event network-vif-plugged-44bf01d8-c683-4726-a747-14f6cf045a75 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1128.221463] env[61473]: DEBUG oslo_concurrency.lockutils [req-edb66640-eec8-4ddd-a2de-eb9e9a6ff07f req-92d9b361-9c8f-449c-8031-f44ebceae254 service nova] Acquiring lock "889ee8c2-615c-477e-8fc5-65241759dc5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.221683] env[61473]: DEBUG oslo_concurrency.lockutils [req-edb66640-eec8-4ddd-a2de-eb9e9a6ff07f req-92d9b361-9c8f-449c-8031-f44ebceae254 service nova] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.221917] env[61473]: DEBUG oslo_concurrency.lockutils [req-edb66640-eec8-4ddd-a2de-eb9e9a6ff07f req-92d9b361-9c8f-449c-8031-f44ebceae254 service nova] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.222045] env[61473]: DEBUG nova.compute.manager [req-edb66640-eec8-4ddd-a2de-eb9e9a6ff07f 
req-92d9b361-9c8f-449c-8031-f44ebceae254 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] No waiting events found dispatching network-vif-plugged-44bf01d8-c683-4726-a747-14f6cf045a75 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1128.222238] env[61473]: WARNING nova.compute.manager [req-edb66640-eec8-4ddd-a2de-eb9e9a6ff07f req-92d9b361-9c8f-449c-8031-f44ebceae254 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Received unexpected event network-vif-plugged-44bf01d8-c683-4726-a747-14f6cf045a75 for instance with vm_state building and task_state spawning. [ 1128.231214] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Successfully updated port: 44bf01d8-c683-4726-a747-14f6cf045a75 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.258167] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Acquiring lock "refresh_cache-889ee8c2-615c-477e-8fc5-65241759dc5f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.258330] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Acquired lock "refresh_cache-889ee8c2-615c-477e-8fc5-65241759dc5f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.258750] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1128.317588] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1128.569773] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Updating instance_info_cache with network_info: [{"id": "44bf01d8-c683-4726-a747-14f6cf045a75", "address": "fa:16:3e:dd:a0:a8", "network": {"id": "f6f9245f-f21f-48e4-b933-9782f6f7a169", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1826521190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "789a379293bf4b8c8feeea856503f0cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44bf01d8-c6", "ovs_interfaceid": "44bf01d8-c683-4726-a747-14f6cf045a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.587042] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Releasing lock "refresh_cache-889ee8c2-615c-477e-8fc5-65241759dc5f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.587042] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Instance network_info: |[{"id": "44bf01d8-c683-4726-a747-14f6cf045a75", "address": "fa:16:3e:dd:a0:a8", "network": {"id": "f6f9245f-f21f-48e4-b933-9782f6f7a169", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1826521190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "789a379293bf4b8c8feeea856503f0cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44bf01d8-c6", "ovs_interfaceid": "44bf01d8-c683-4726-a747-14f6cf045a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1128.587285] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:a0:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '140f4558-c11e-4af4-ab36-234e2d2f80a4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44bf01d8-c683-4726-a747-14f6cf045a75', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1128.593671] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Creating folder: Project (789a379293bf4b8c8feeea856503f0cb). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1128.594512] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-588dca39-a748-4896-a9bb-37b04105e9cd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.608512] env[61473]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1128.608756] env[61473]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61473) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1128.608990] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Folder already exists: Project (789a379293bf4b8c8feeea856503f0cb). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1128.609204] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Creating folder: Instances. Parent ref: group-v843526. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1128.609437] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-afdb5a20-2b6b-46d0-a9d1-58f4de1704c6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.619226] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Created folder: Instances in parent group-v843526. [ 1128.619499] env[61473]: DEBUG oslo.service.loopingcall [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1128.619687] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1128.619886] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf626484-bed5-4a82-8004-30b3d8d43f0e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.639337] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1128.639337] env[61473]: value = "task-4281595" [ 1128.639337] env[61473]: _type = "Task" [ 1128.639337] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.651575] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281595, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.154808] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281595, 'name': CreateVM_Task, 'duration_secs': 0.319055} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.154808] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1129.154808] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'attachment_id': '25adcb4d-74f8-4896-aa8b-fd9a18275ec5', 'guest_format': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843529', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'name': 'volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '889ee8c2-615c-477e-8fc5-65241759dc5f', 'attached_at': '', 'detached_at': '', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'serial': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f'}, 'device_type': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=61473) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1129.155172] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Root volume attach. 
Driver type: vmdk {{(pid=61473) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1129.155172] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b52525c-1630-407a-bfb2-11cd76645dd3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.166020] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c709d81c-2058-450f-88f5-ceeb8cbe43c0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.171132] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18256802-30ce-4df3-89f7-17d21a2afa64 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.178270] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-44ee679c-ddb3-4e8b-990c-061323b086ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.186023] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1129.186023] env[61473]: value = "task-4281596" [ 1129.186023] env[61473]: _type = "Task" [ 1129.186023] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.194516] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281596, 'name': RelocateVM_Task} progress is 5%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.702525] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281596, 'name': RelocateVM_Task, 'duration_secs': 0.406746} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.702525] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Volume attach. 
Driver type: vmdk {{(pid=61473) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1129.702525] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843529', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'name': 'volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '889ee8c2-615c-477e-8fc5-65241759dc5f', 'attached_at': '', 'detached_at': '', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'serial': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f'} {{(pid=61473) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1129.703502] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc4c42d-2abc-4f80-9679-cf22fc3dc802 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.721701] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58dc477-eddc-48cf-aab1-e0eef5da3418 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.744968] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f/volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f.vmdk or device None with type thin {{(pid=61473) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1129.745326] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e59a52b2-6c3d-438d-bf83-6fccd688a208 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.770641] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1129.770641] env[61473]: value = "task-4281597" [ 1129.770641] env[61473]: _type = "Task" [ 1129.770641] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.780526] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281597, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.248793] env[61473]: DEBUG nova.compute.manager [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Received event network-changed-44bf01d8-c683-4726-a747-14f6cf045a75 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1130.248999] env[61473]: DEBUG nova.compute.manager [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Refreshing instance network info cache due to event network-changed-44bf01d8-c683-4726-a747-14f6cf045a75. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1130.249232] env[61473]: DEBUG oslo_concurrency.lockutils [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] Acquiring lock "refresh_cache-889ee8c2-615c-477e-8fc5-65241759dc5f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.249376] env[61473]: DEBUG oslo_concurrency.lockutils [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] Acquired lock "refresh_cache-889ee8c2-615c-477e-8fc5-65241759dc5f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.249553] env[61473]: DEBUG nova.network.neutron [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Refreshing network info cache for port 44bf01d8-c683-4726-a747-14f6cf045a75 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1130.282018] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281597, 'name': ReconfigVM_Task, 'duration_secs': 0.258729} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.282018] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Reconfigured VM instance instance-00000032 to attach disk [datastore2] volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f/volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f.vmdk or device None with type thin {{(pid=61473) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1130.291678] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ac37ef4-e08a-4d80-86c5-c8e4f495b5ea {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.308512] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1130.308512] env[61473]: value = "task-4281598" [ 1130.308512] env[61473]: _type = "Task" [ 1130.308512] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.318468] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281598, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.625505] env[61473]: DEBUG nova.network.neutron [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Updated VIF entry in instance network info cache for port 44bf01d8-c683-4726-a747-14f6cf045a75. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1130.625505] env[61473]: DEBUG nova.network.neutron [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Updating instance_info_cache with network_info: [{"id": "44bf01d8-c683-4726-a747-14f6cf045a75", "address": "fa:16:3e:dd:a0:a8", "network": {"id": "f6f9245f-f21f-48e4-b933-9782f6f7a169", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1826521190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "789a379293bf4b8c8feeea856503f0cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44bf01d8-c6", "ovs_interfaceid": "44bf01d8-c683-4726-a747-14f6cf045a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.634555] env[61473]: DEBUG oslo_concurrency.lockutils [req-701c6d3c-af45-40b7-b03e-5f441349019f req-d0316a70-891b-46a4-b1d1-f15484892b58 service nova] Releasing lock "refresh_cache-889ee8c2-615c-477e-8fc5-65241759dc5f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.822361] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281598, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.322819] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281598, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.821709] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281598, 'name': ReconfigVM_Task, 'duration_secs': 1.124797} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.823560] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843529', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'name': 'volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '889ee8c2-615c-477e-8fc5-65241759dc5f', 'attached_at': '', 'detached_at': '', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'serial': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f'} {{(pid=61473) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1131.824267] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8b02665-a674-47a0-852b-86f990002683 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.830853] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1131.830853] env[61473]: value = "task-4281599" [ 1131.830853] env[61473]: _type = "Task" [ 1131.830853] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.840405] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281599, 'name': Rename_Task} progress is 5%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.860545] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "0367d64d-76f3-4483-bc17-77cd900569ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.860545] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "0367d64d-76f3-4483-bc17-77cd900569ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.887207] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "a650e57a-85cf-416c-8787-a4ab98d4a930" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.887448] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.341098] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281599, 'name': Rename_Task, 'duration_secs': 0.127201} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.341417] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Powering on the VM {{(pid=61473) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1132.341621] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94e5950d-0a6c-4187-b742-672cf11fc0b3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.349963] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1132.349963] env[61473]: value = "task-4281600" [ 1132.349963] env[61473]: _type = "Task" [ 1132.349963] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.358116] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.770507] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Acquiring lock "889ee8c2-615c-477e-8fc5-65241759dc5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.861946] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281600, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.360528] env[61473]: DEBUG oslo_vmware.api [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281600, 'name': PowerOnVM_Task, 'duration_secs': 0.519325} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.360898] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Powered on the VM {{(pid=61473) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1133.361116] env[61473]: INFO nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Took 6.61 seconds to spawn the instance on the hypervisor. [ 1133.361365] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Checking state {{(pid=61473) _get_power_state /opt/stack/nova/nova/compute/manager.py:1787}} [ 1133.362146] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914bc6a5-c795-4715-aac3-f492cf3f19a6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.409698] env[61473]: DEBUG nova.compute.utils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Conflict updating instance 889ee8c2-615c-477e-8fc5-65241759dc5f. Expected: {'task_state': ['spawning']}. 
Actual: {'task_state': 'deleting'} {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1133.411421] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Instance disappeared during build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2509}} [ 1133.411596] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1133.411759] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1133.411959] env[61473]: DEBUG nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1133.412182] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1133.881350] env[61473]: DEBUG oslo_concurrency.lockutils [None req-042d6f0a-a899-4694-a201-e5abe67715bf tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "7e96360f-c62a-474e-a73e-9d7db6384987" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.881650] env[61473]: DEBUG oslo_concurrency.lockutils [None req-042d6f0a-a899-4694-a201-e5abe67715bf tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "7e96360f-c62a-474e-a73e-9d7db6384987" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.199907] env[61473]: DEBUG nova.network.neutron [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.221197] env[61473]: INFO nova.compute.manager [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 
tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Took 0.81 seconds to deallocate network for instance. [ 1134.325316] env[61473]: DEBUG nova.compute.manager [req-5407381b-1698-4abf-a11d-6f93fc5cda1f req-20c6554c-870d-4b46-a62b-b55608dd7255 service nova] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Received event network-vif-deleted-44bf01d8-c683-4726-a747-14f6cf045a75 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1134.332117] env[61473]: INFO nova.scheduler.client.report [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Deleted allocations for instance 889ee8c2-615c-477e-8fc5-65241759dc5f [ 1134.332398] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8277591e-e01b-43ac-97a1-3fd27f6d69bc tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.599s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.333933] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.564s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.334157] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Acquiring lock "889ee8c2-615c-477e-8fc5-65241759dc5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.334449] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.334655] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.340905] env[61473]: INFO nova.compute.manager [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Terminating instance [ 1134.342512] env[61473]: DEBUG nova.compute.manager [None 
req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1134.342778] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Powering off the VM {{(pid=61473) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1134.343042] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-912e3b82-2ca5-4335-8386-252626664f4d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.347193] env[61473]: DEBUG nova.compute.manager [None req-55a4d609-802c-4ba1-83f2-43fd57ab1cc2 tempest-ServerAddressesNegativeTestJSON-253754799 tempest-ServerAddressesNegativeTestJSON-253754799-project-member] [instance: f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1134.355018] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1134.355018] env[61473]: value = "task-4281601" [ 1134.355018] env[61473]: _type = "Task" [ 1134.355018] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.360883] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281601, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.378941] env[61473]: DEBUG nova.compute.manager [None req-55a4d609-802c-4ba1-83f2-43fd57ab1cc2 tempest-ServerAddressesNegativeTestJSON-253754799 tempest-ServerAddressesNegativeTestJSON-253754799-project-member] [instance: f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1134.407648] env[61473]: DEBUG oslo_concurrency.lockutils [None req-55a4d609-802c-4ba1-83f2-43fd57ab1cc2 tempest-ServerAddressesNegativeTestJSON-253754799 tempest-ServerAddressesNegativeTestJSON-253754799-project-member] Lock "f9e60e39-7fd9-4ff7-900c-5e38a2d6b9f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.684s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.419212] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Starting instance... 
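The PowerOffVM_Task above (task-4281601) is typical of the driver's vSphere calls: the SOAP method returns a task reference immediately, and oslo_vmware's wait_for_task then polls it, emitting the "progress is N%" DEBUG records until the task reaches a terminal state. A minimal sketch of that poll loop, assuming a stand-in fetch_task_info callable in place of a live vCenter session (the real loop in oslo_vmware/api.py is driven by oslo.service's loopingcall machinery):

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
        # fetch_task_info(task_id) is assumed to return a dict like
        # {'state': 'running', 'progress': 14}, standing in for the
        # TaskInfo object a real vCenter session would hand back.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Corresponds to the periodic "progress is N%" DEBUG records.
            print("Task %s progress is %s%%" % (task_id, info.get('progress', 0)))
            time.sleep(interval)
        raise TimeoutError('task %s did not finish within %ss' % (task_id, timeout))

    # Simulated task: one 'running' poll, then success.
    _states = iter([{'state': 'running', 'progress': 0}, {'state': 'success'}])
    wait_for_task(lambda task_id: next(_states), 'task-4281601', interval=0.01)

The 'duration_secs' value logged when a task completes is essentially the elapsed time such a loop observes between invocation and success.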
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1134.475174] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.475486] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.477284] env[61473]: INFO nova.compute.claims [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1134.864674] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281601, 'name': PowerOffVM_Task, 'duration_secs': 0.193653} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.867186] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Powered off the VM {{(pid=61473) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1134.867392] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Volume detach. 
Driver type: vmdk {{(pid=61473) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1134.867579] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843529', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'name': 'volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '889ee8c2-615c-477e-8fc5-65241759dc5f', 'attached_at': '', 'detached_at': '', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'serial': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f'} {{(pid=61473) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1134.868542] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8109654a-38b0-4322-99e0-ba29bc3a60e7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.889347] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7749b8e3-1b4d-451b-b759-244d40674151 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.896391] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598cf7b2-4a61-4ad3-84ec-1d80aa4910d7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.916631] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6b7e30-809d-4ea5-96a9-864014003447 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.933563] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] The volume has not been displaced from its original location: [datastore2] volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f/volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f.vmdk. No consolidation needed. 
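The _detach_volume_vmdk record above dumps the Cinder-supplied connection_info verbatim. Below is a small sketch of extracting the fields the vmdk detach path actually needs from that dict; the key names are taken from the logged structure itself, while the helper and its validation are illustrative rather than Nova's actual code:

    def parse_vmdk_connection_info(connection_info):
        # Field names mirror the dict logged above.
        if connection_info.get('driver_volume_type') != 'vmdk':
            raise ValueError('expected a vmdk connection, got %r'
                             % connection_info.get('driver_volume_type'))
        data = connection_info['data']
        return {
            'backing_ref': data['volume'],   # 'vm-843529', the volume's backing VM
            'volume_id': data['volume_id'],
            'vmdk_name': data['name'],       # 'volume-<volume_id>'
            'read_only': data.get('access_mode') == 'ro',
            'encrypted': data.get('encrypted', False),
        }

    info = parse_vmdk_connection_info({
        'driver_volume_type': 'vmdk',
        'data': {'volume': 'vm-843529',
                 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f',
                 'name': 'volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f',
                 'access_mode': 'rw', 'encrypted': False},
    })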
{{(pid=61473) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1134.939136] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=61473) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1134.940559] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa4ed4db-c1e9-43ff-9773-136856c05779 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.954248] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb966f4-24b8-48bd-8c1a-484728b91e2c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.961577] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446476ab-9958-4961-893d-ea2cc6102351 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.965914] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1134.965914] env[61473]: value = "task-4281602" [ 1134.965914] env[61473]: _type = "Task" [ 1134.965914] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.995966] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8285eee4-0ec7-499a-b6eb-86bf7305dab0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.001859] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281602, 'name': ReconfigVM_Task} progress is 14%. 
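Detaching the disk is not a dedicated API call: the driver reconfigures the VM (ReconfigVM_Task, task-4281602 above) with a device-change entry whose operation is 'remove' for device key 2000. A rough dict-shaped sketch of that spec follows; the real driver builds vim.vm.ConfigSpec SOAP objects through its suds client, so treat the structure below as conceptual:

    def build_disk_remove_spec(device_key, destroy_backing=False):
        # Conceptual shape of a ReconfigVM_Task spec removing one disk.
        device_change = {
            'operation': 'remove',
            'device': {'key': device_key},  # 2000 = first disk on the first SCSI bus
        }
        if destroy_backing:
            # 'destroy' would also delete the backing vmdk file; a volume
            # detach deliberately omits it so the Cinder volume survives.
            device_change['fileOperation'] = 'destroy'
        return {'deviceChange': [device_change]}

    spec = build_disk_remove_spec(2000)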
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.006642] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0eb771-ad00-4007-b598-3d524a09eb35 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.019960] env[61473]: DEBUG nova.compute.provider_tree [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.029792] env[61473]: DEBUG nova.scheduler.client.report [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1135.050364] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.573s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.050364] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1135.088924] env[61473]: DEBUG nova.compute.utils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1135.088924] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Allocating IP information in the background. 
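The report client above skips a placement update because the freshly collected inventory matches what the provider tree already caches. At its core that check is a per-resource-class dict comparison; a minimal sketch, with the inventory literal copied from the log record:

    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def changed_resource_classes(cached, fresh):
        # Classes added, dropped, or re-tuned since the last sync.
        return {rc for rc in set(cached) | set(fresh)
                if cached.get(rc) != fresh.get(rc)}

    if not changed_resource_classes(INVENTORY, dict(INVENTORY)):
        print('Inventory has not changed; skipping PUT to placement')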
{{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1135.088924] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1135.104955] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1135.184217] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1135.216535] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1135.216785] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1135.216952] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.217585] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1135.217768] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.218364] 
env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1135.218605] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1135.218782] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1135.218962] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1135.219144] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1135.219601] env[61473]: DEBUG nova.virt.hardware [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1135.221437] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b787672-141f-407f-9eaa-28fc0307c78c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.226487] env[61473]: DEBUG nova.policy [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '433bd61523084b1c8b2a31a41044ff10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96c4c8af1c9244de9e4e7bcce5d083dd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1135.239348] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2338fceb-8300-4902-9e3d-6636c0c1ee82 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.477602] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 
tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281602, 'name': ReconfigVM_Task, 'duration_secs': 0.161178} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.480268] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=61473) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1135.483577] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36b9af2a-a5fa-47c2-944a-504510b3a5b0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.502413] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1135.502413] env[61473]: value = "task-4281603" [ 1135.502413] env[61473]: _type = "Task" [ 1135.502413] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.511998] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281603, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.016150] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281603, 'name': ReconfigVM_Task, 'duration_secs': 0.113879} completed successfully. 
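Stepping back to the nova.virt.hardware records above: with no CPU topology constraints in the m1.nano flavor or the cirros image, the limits default to 65536 sockets/cores/threads each, and for 1 vCPU exactly one factorisation survives. The heart of that search is enumerating sockets*cores*threads products equal to the vCPU count; a simplified sketch (limits are clamped small here so the brute-force product stays tiny, and Nova's real version additionally orders candidates by preference):

    import itertools
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Yield every factorisation of vcpus within the given limits.
        for s, c, t in itertools.product(range(1, max_sockets + 1),
                                         range(1, max_cores + 1),
                                         range(1, max_threads + 1)):
            if s * c * t == vcpus:
                yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1, 8, 8, 2)))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the log.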
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.016977] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843529', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'name': 'volume-1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '889ee8c2-615c-477e-8fc5-65241759dc5f', 'attached_at': '', 'detached_at': '', 'volume_id': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f', 'serial': '1666cc3b-8d9d-4ea8-914a-40fed1b8998f'} {{(pid=61473) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1136.019928] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1136.019928] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d764bc8d-8829-44c0-a6e3-0f9b5cf4f9fb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.027020] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1136.027143] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a8e92b9-2ca3-46f5-9db8-03965cfe8a92 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.086778] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1136.087016] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1136.087226] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Deleting the datastore file [datastore2] 889ee8c2-615c-477e-8fc5-65241759dc5f {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.087503] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3e947f5-3c69-4d82-b8d5-d8e6db70e5a9 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.096078] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for the task: (returnval){ [ 1136.096078] env[61473]: value = "task-4281605" [ 1136.096078] env[61473]: _type = "Task" [ 1136.096078] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.107541] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281605, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.158939] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Successfully created port: b8669d35-d4ec-4dec-8e08-4859141653fd {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1136.609518] env[61473]: DEBUG oslo_vmware.api [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Task: {'id': task-4281605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08051} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.609976] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.610324] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1136.610747] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1136.611082] env[61473]: INFO nova.compute.manager [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1136.613630] env[61473]: DEBUG oslo.service.loopingcall [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
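Once the guest is destroyed, network deallocation is wrapped in _deallocate_network_with_retries and executed through oslo.service's loopingcall helpers, which keep re-invoking the function until it succeeds or the retry budget is spent. A stdlib-only sketch of that retry-with-backoff pattern; it shows the idea, not oslo.service's actual API:

    import time

    def call_with_retries(func, attempts=3, initial_delay=1.0, backoff=2.0):
        # Call func(); on failure sleep and retry with exponential backoff.
        delay = initial_delay
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception as exc:
                if attempt == attempts:
                    raise
                print('attempt %d failed (%s); retrying in %.1fs'
                      % (attempt, exc, delay))
                time.sleep(delay)
                delay *= backoff

    call_with_retries(lambda: print('deallocate_for_instance()'))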
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1136.613630] env[61473]: DEBUG nova.compute.manager [-] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1136.613630] env[61473]: DEBUG nova.network.neutron [-] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1136.701188] env[61473]: DEBUG nova.network.neutron [-] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.713992] env[61473]: INFO nova.compute.manager [-] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Took 0.10 seconds to deallocate network for instance. [ 1136.790810] env[61473]: INFO nova.compute.manager [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Took 0.08 seconds to detach 1 volumes for instance. [ 1136.792983] env[61473]: DEBUG nova.compute.manager [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Deleting volume: 1666cc3b-8d9d-4ea8-914a-40fed1b8998f {{(pid=61473) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3243}} [ 1136.880759] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.881226] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.881686] env[61473]: DEBUG nova.objects.instance [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lazy-loading 'resources' on Instance uuid 889ee8c2-615c-477e-8fc5-65241759dc5f {{(pid=61473) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.181941] env[61473]: DEBUG nova.compute.manager [req-b0a88c08-6337-44eb-b92c-01acea477238 req-67463f42-6bb2-4669-9451-2500da65e18c service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Received event network-vif-plugged-b8669d35-d4ec-4dec-8e08-4859141653fd {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1137.182179] env[61473]: DEBUG oslo_concurrency.lockutils [req-b0a88c08-6337-44eb-b92c-01acea477238 req-67463f42-6bb2-4669-9451-2500da65e18c service nova] Acquiring lock "a12b01db-28b4-477d-aef2-99304505d8c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.182328] env[61473]: DEBUG oslo_concurrency.lockutils [req-b0a88c08-6337-44eb-b92c-01acea477238 req-67463f42-6bb2-4669-9451-2500da65e18c service nova] Lock "a12b01db-28b4-477d-aef2-99304505d8c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.182500] env[61473]: DEBUG oslo_concurrency.lockutils [req-b0a88c08-6337-44eb-b92c-01acea477238 req-67463f42-6bb2-4669-9451-2500da65e18c service nova] Lock "a12b01db-28b4-477d-aef2-99304505d8c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.182666] env[61473]: DEBUG nova.compute.manager [req-b0a88c08-6337-44eb-b92c-01acea477238 req-67463f42-6bb2-4669-9451-2500da65e18c service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] No waiting events found dispatching network-vif-plugged-b8669d35-d4ec-4dec-8e08-4859141653fd {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1137.182863] env[61473]: WARNING nova.compute.manager [req-b0a88c08-6337-44eb-b92c-01acea477238 req-67463f42-6bb2-4669-9451-2500da65e18c service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Received unexpected event network-vif-plugged-b8669d35-d4ec-4dec-8e08-4859141653fd for instance with vm_state building and task_state spawning. [ 1137.272187] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "a12b01db-28b4-477d-aef2-99304505d8c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.291570] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27807e43-f54b-4939-980a-6dd618942223 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.299668] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf96d7b6-0e97-47e7-8bfe-711dc3330730 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.331705] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Successfully updated port: b8669d35-d4ec-4dec-8e08-4859141653fd {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1137.333623] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83d8fa2-8586-4377-b51c-c4c1cf1c4cc1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.341493] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18fe0de-a858-4ba2-b9a0-c3b85e6376cb {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.346724] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.346837] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquired lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.346958] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1137.359379] env[61473]: DEBUG nova.compute.provider_tree [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.366705] env[61473]: DEBUG nova.scheduler.client.report [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1137.385344] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.504s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.412717] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance cache missing network info. 
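The refresh_cache-<uuid> acquire/release pairs above are oslo.concurrency named locks: callers request a lock by string name, and lockutils reports how long each caller waited and then held it, which is what makes contention (such as the 199-second build lock earlier) visible in this log. A stdlib sketch of a named-lock registry with the same timing output; the real lockutils adds fair locking and external file-based locks on top:

    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)  # one shared Lock object per name

    @contextmanager
    def named_lock(name, by):
        start = time.monotonic()
        _locks[name].acquire()
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (name, by, time.monotonic() - start))
        held_since = time.monotonic()
        try:
            yield
        finally:
            _locks[name].release()
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, by, time.monotonic() - held_since))

    with named_lock('refresh_cache-a12b01db', 'build_network_info'):
        pass  # refresh the instance's network info cache here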
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1137.459756] env[61473]: DEBUG oslo_concurrency.lockutils [None req-fcf8988f-28b4-41ce-b12e-a40430e33320 tempest-ServerActionsV293TestJSON-1547599870 tempest-ServerActionsV293TestJSON-1547599870-project-member] Lock "889ee8c2-615c-477e-8fc5-65241759dc5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.126s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.642602] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Updating instance_info_cache with network_info: [{"id": "b8669d35-d4ec-4dec-8e08-4859141653fd", "address": "fa:16:3e:b1:96:0e", "network": {"id": "6e1b841a-16bc-4399-a12e-6c934bf8a1b1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1755286603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96c4c8af1c9244de9e4e7bcce5d083dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d182e8eb-3f6d-4c76-a06e-133dd9b3cd30", "external-id": "nsx-vlan-transportzone-260", "segmentation_id": 260, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8669d35-d4", "ovs_interfaceid": "b8669d35-d4ec-4dec-8e08-4859141653fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.657359] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Releasing lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.657650] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance network_info: |[{"id": "b8669d35-d4ec-4dec-8e08-4859141653fd", "address": "fa:16:3e:b1:96:0e", "network": {"id": "6e1b841a-16bc-4399-a12e-6c934bf8a1b1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1755286603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96c4c8af1c9244de9e4e7bcce5d083dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "d182e8eb-3f6d-4c76-a06e-133dd9b3cd30", "external-id": "nsx-vlan-transportzone-260", "segmentation_id": 260, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8669d35-d4", "ovs_interfaceid": "b8669d35-d4ec-4dec-8e08-4859141653fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1137.658086] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:96:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd182e8eb-3f6d-4c76-a06e-133dd9b3cd30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8669d35-d4ec-4dec-8e08-4859141653fd', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1137.666676] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Creating folder: Project (96c4c8af1c9244de9e4e7bcce5d083dd). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1137.667295] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5007656-65b8-4e20-b1ca-0a98fe207b37 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.679011] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Created folder: Project (96c4c8af1c9244de9e4e7bcce5d083dd) in parent group-v843485. [ 1137.679011] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Creating folder: Instances. Parent ref: group-v843548. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1137.679171] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b052d3d-0806-4c20-9c8e-d36e2e614f44 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.687894] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Created folder: Instances in parent group-v843548. [ 1137.688185] env[61473]: DEBUG oslo.service.loopingcall [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1137.688454] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1137.688756] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07bd6a38-ed42-465d-b8a7-bad33aec79bf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.721298] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1137.721298] env[61473]: value = "task-4281609" [ 1137.721298] env[61473]: _type = "Task" [ 1137.721298] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.729430] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281609, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.230873] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281609, 'name': CreateVM_Task, 'duration_secs': 0.319695} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.231131] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1138.231709] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.231882] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.232221] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1138.232457] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5967591-79aa-402b-bcb5-2268cefcb368 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.236894] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Waiting for the task: (returnval){ [ 1138.236894] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52dc9009-b79c-8ebd-416f-9992d3596a7d" [ 1138.236894] env[61473]: _type = "Task" [ 1138.236894] env[61473]: } to complete. 
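Before fetching anything from Glance, the spawn path locks the per-image entry in devstack-image-cache_base and issues SearchDatastore_Task to see whether the cached vmdk for image aa35b7fc-44b5-479c-b6c8-60930c581f0d already exists; only a miss triggers a download. A filesystem-flavoured sketch of that check-then-populate flow, with a local directory standing in for the datastore and fetch standing in for the Glance transfer:

    from pathlib import Path

    def ensure_cached_image(cache_root, image_id, fetch):
        # cache_root stands in for '[datastore2] devstack-image-cache_base';
        # fetch(dest) stands in for downloading/converting the image.
        vmdk = Path(cache_root) / image_id / ('%s.vmdk' % image_id)
        if vmdk.exists():          # the SearchDatastore_Task "hit" case
            return vmdk
        vmdk.parent.mkdir(parents=True, exist_ok=True)
        fetch(vmdk)                # cache miss: populate the cache entry
        return vmdk

    path = ensure_cached_image('/tmp/image-cache',
                               'aa35b7fc-44b5-479c-b6c8-60930c581f0d',
                               lambda dest: dest.write_bytes(b''))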
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.244265] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52dc9009-b79c-8ebd-416f-9992d3596a7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.753080] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.753353] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1138.753565] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.217788] env[61473]: DEBUG nova.compute.manager [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Received event network-changed-b8669d35-d4ec-4dec-8e08-4859141653fd {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1139.218830] env[61473]: DEBUG nova.compute.manager [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Refreshing instance network info cache due to event network-changed-b8669d35-d4ec-4dec-8e08-4859141653fd. 
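The network-vif-plugged and network-changed records are external events pushed by Neutron into nova-compute. Dispatch is a lookup in a table of waiters keyed by (instance, event name): if someone registered interest, the event wakes them; otherwise the manager logs the "Received unexpected event" WARNING seen earlier. A minimal sketch of that table; the class and method names are simplified stand-ins:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev           # caller blocks on ev.wait(timeout)

        def pop(self, instance_uuid, event_name):
            ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print('Received unexpected event %s for instance %s'
                      % (event_name, instance_uuid))
            else:
                ev.set()        # wake the waiter

    events = InstanceEvents()
    events.pop('a12b01db-28b4-477d-aef2-99304505d8c9',
               'network-vif-plugged-b8669d35-d4ec-4dec-8e08-4859141653fd')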
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1139.218830] env[61473]: DEBUG oslo_concurrency.lockutils [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] Acquiring lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.218830] env[61473]: DEBUG oslo_concurrency.lockutils [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] Acquired lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.218830] env[61473]: DEBUG nova.network.neutron [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Refreshing network info cache for port b8669d35-d4ec-4dec-8e08-4859141653fd {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1139.516399] env[61473]: DEBUG nova.network.neutron [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Updated VIF entry in instance network info cache for port b8669d35-d4ec-4dec-8e08-4859141653fd. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1139.516399] env[61473]: DEBUG nova.network.neutron [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Updating instance_info_cache with network_info: [{"id": "b8669d35-d4ec-4dec-8e08-4859141653fd", "address": "fa:16:3e:b1:96:0e", "network": {"id": "6e1b841a-16bc-4399-a12e-6c934bf8a1b1", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1755286603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96c4c8af1c9244de9e4e7bcce5d083dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d182e8eb-3f6d-4c76-a06e-133dd9b3cd30", "external-id": "nsx-vlan-transportzone-260", "segmentation_id": 260, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8669d35-d4", "ovs_interfaceid": "b8669d35-d4ec-4dec-8e08-4859141653fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.525329] env[61473]: DEBUG oslo_concurrency.lockutils [req-dd8e0445-aa68-4f33-8e42-cacb5fb81150 req-113cb08d-0416-4a51-838a-f86d8efc88b8 service nova] Releasing lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.685426] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dcbc0336-2056-4e90-80a8-ad4849c11c1b tempest-AttachVolumeShelveTestJSON-1974781682 
tempest-AttachVolumeShelveTestJSON-1974781682-project-member] Acquiring lock "8886a746-98cf-465d-b869-ebbe734ffa3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.685777] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dcbc0336-2056-4e90-80a8-ad4849c11c1b tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] Lock "8886a746-98cf-465d-b869-ebbe734ffa3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.966257] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.966576] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.979074] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.979074] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.979074] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.979074] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1166.980077] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664e48aa-11e5-41ac-865c-82460a6430e7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.988954] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97870bb9-4c11-4333-b77a-a075d688d421 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.002833] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3675ad8d-5d1a-43a9-b4d7-baf9f9a0e742 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.008919] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c757dbc9-04f6-41c0-a6ce-f932a4ff3d3c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.037275] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180646MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1167.037421] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.037701] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.115050] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.115222] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c6880758-25cf-4078-9455-827db6fb6435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.115349] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.115470] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.115588] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.115705] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.115820] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.115933] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.116056] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.116171] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.128897] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 651ebf44-9c99-41a3-b7fb-ab5914002e85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.141487] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 284671f0-2679-4344-86fa-4ea0f05f09bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.153782] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 55ef17e9-54f8-429e-91bb-22a9be430200 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.165020] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.174707] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.184967] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.194793] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64a46ce2-e173-4d23-b5a0-32e28e0f068c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.205338] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2002ff6b-8648-4ec4-be86-da7a0ee886f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.215080] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bea0e473-ff2e-453c-802a-84648b6d6c51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.225380] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 189c4110-3e1c-424e-8102-5b894fb27963 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.235161] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4c5fd04e-7c5f-4499-a9da-852301ecd9a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.244479] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.254555] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.264448] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7e96360f-c62a-474e-a73e-9d7db6384987 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.274214] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8886a746-98cf-465d-b869-ebbe734ffa3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.274450] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1167.274598] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1167.550977] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3906fa81-5226-4a4e-b11d-8dd6262fe5ba {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.559032] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210308c5-7c2e-4009-82a9-aa5b4d7d98e1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.588060] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56be9901-3a2f-4cad-a8d4-c4571e447eeb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.594981] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e338ef-bd3c-4a47-aab9-7f106164631e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.608305] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.618671] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1167.632622] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1167.632868] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.595s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.632583] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.632914] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1168.966777] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.965800] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.966221] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.966687] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.966966] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1170.967024] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1170.987830] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.987990] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c6880758-25cf-4078-9455-827db6fb6435] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988139] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988267] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988391] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988514] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988632] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988752] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988895] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.988984] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1170.989111] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1170.989582] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.637618] env[61473]: WARNING oslo_vmware.rw_handles [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1171.637618] env[61473]: ERROR oslo_vmware.rw_handles [ 1171.638082] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1171.640080] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1171.640350] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Copying Virtual Disk [datastore2] vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/9e155194-093c-41e3-9edc-939d220fc9d6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1171.640650] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c49157b-a4ab-4cb2-a722-d300669e38b6 
{{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.651318] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Waiting for the task: (returnval){ [ 1171.651318] env[61473]: value = "task-4281610" [ 1171.651318] env[61473]: _type = "Task" [ 1171.651318] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.659105] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Task: {'id': task-4281610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.984355] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.162625] env[61473]: DEBUG oslo_vmware.exceptions [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1172.163034] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.163697] env[61473]: ERROR nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1172.163697] env[61473]: Faults: ['InvalidArgument'] [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Traceback (most recent call last): [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] yield resources [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] self.driver.spawn(context, instance, image_meta, [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] 
self._vmops.spawn(context, instance, image_meta, injected_files, [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] self._fetch_image_if_missing(context, vi) [ 1172.163697] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] image_cache(vi, tmp_image_ds_loc) [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] vm_util.copy_virtual_disk( [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] session._wait_for_task(vmdk_copy_task) [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] return self.wait_for_task(task_ref) [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] return evt.wait() [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] result = hub.switch() [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1172.164133] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] return self.greenlet.switch() [ 1172.165020] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1172.165020] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] self.f(*self.args, **self.kw) [ 1172.165020] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1172.165020] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] raise exceptions.translate_fault(task_info.error) [ 1172.165020] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] oslo_vmware.exceptions.VimFaultException: A specified parameter was not 
correct: fileType [ 1172.165020] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Faults: ['InvalidArgument'] [ 1172.165020] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] [ 1172.165020] env[61473]: INFO nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Terminating instance [ 1172.165731] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.165947] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.166203] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4efa61a3-8dbf-4119-a31e-4ecc81463fa4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.170533] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1172.170533] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1172.170533] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a21fabc-08d7-429a-875d-fa444d92c52a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.178494] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1172.178714] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad0e5259-4380-47e4-b1b9-972fa27b171b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.180985] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.181163] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1172.182160] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10f5a7a4-188a-4f75-9bdd-3e0415974f35 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.187203] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 1172.187203] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52aecae6-b445-c6ca-1f44-897092437dc4" [ 1172.187203] env[61473]: _type = "Task" [ 1172.187203] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.195148] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52aecae6-b445-c6ca-1f44-897092437dc4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.245988] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1172.246280] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1172.246469] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Deleting the datastore file [datastore2] f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1172.246730] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-156113b4-fd58-4329-9bad-ad3a1505c582 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.252880] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Waiting for the task: (returnval){ [ 1172.252880] env[61473]: value = "task-4281612" [ 1172.252880] env[61473]: _type = "Task" [ 1172.252880] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.262453] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Task: {'id': task-4281612, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.697207] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1172.697463] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating directory with path [datastore2] vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.697694] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3f9c020-0e78-45a9-9bdb-7631b5f87bdc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.708535] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Created directory with path [datastore2] vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.708648] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Fetch image to [datastore2] vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1172.708805] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1172.709521] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105b684c-67a3-4c53-b9fa-f40945cd6294 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.715606] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750fa799-6256-4936-a632-0ab47cba9cec {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.724273] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad469866-ec5d-4ec6-a06d-7047b53c34d9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.758209] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb82ab3-5b72-44b6-b2db-97496d476546 {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.765411] env[61473]: DEBUG oslo_vmware.api [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Task: {'id': task-4281612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090077} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.766861] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.767066] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1172.767243] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1172.767417] env[61473]: INFO nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Took 0.60 seconds to destroy the instance on the hypervisor. 
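The final resource view above is plain arithmetic over the per-instance placement allocations: ten actively managed instances each holding {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, plus the 512 MB 'reserved' in the MEMORY_MB inventory, account for used_ram=1792MB, used_disk=10GB and used_vcpus=10. A minimal Python sketch that reproduces the tally (the tally helper is illustrative, not the resource tracker's code):

    # Illustrative tally of placement-style allocations; reproduces the
    # "Final resource view" numbers logged above.
    RESERVED_MEMORY_MB = 512  # the MEMORY_MB 'reserved' value in the inventory above

    def tally(allocations):
        used = {'VCPU': 0, 'MEMORY_MB': 0, 'DISK_GB': 0}
        for alloc in allocations:
            for rc, amount in alloc['resources'].items():
                used[rc] = used.get(rc, 0) + amount
        used['MEMORY_MB'] += RESERVED_MEMORY_MB  # reserved host memory is reported as used
        return used

    # Ten instances with identical allocations, as in the audit above:
    allocs = [{'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}] * 10
    print(tally(allocs))  # {'VCPU': 10, 'MEMORY_MB': 1792, 'DISK_GB': 10}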
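The "Waiting for the task ... to complete" and "progress is 0%" entries (task-4281610 and task-4281612 above) trace a poll loop: the task object is re-read until it reaches a terminal state, and an error state is raised as a translated fault. A self-contained sketch of that shape; poll_task and the task-dict layout are illustrative stand-ins, not the oslo.vmware API:

    import time

    def poll_task(fetch_info, interval=0.5):
        """Re-poll a task until it reaches a terminal state."""
        while True:
            info = fetch_info()  # one property-collector round trip per poll
            print(f"Task: {info['id']} progress is {info['progress']}%")
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # oslo.vmware raises the translated VIM fault at this point
                raise RuntimeError(info['error'])
            time.sleep(interval)

    states = iter([
        {'id': 'task-4281612', 'progress': 0, 'state': 'running'},
        {'id': 'task-4281612', 'progress': 100, 'state': 'success'},
    ])
    poll_task(lambda: next(states), interval=0)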
[ 1172.769328] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a934a4f6-48e3-471e-af4d-233b58470087 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.771381] env[61473]: DEBUG nova.compute.claims [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1172.771464] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.771661] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.794060] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1172.880165] env[61473]: DEBUG oslo_vmware.rw_handles [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1172.940020] env[61473]: DEBUG oslo_vmware.rw_handles [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1172.940195] env[61473]: DEBUG oslo_vmware.rw_handles [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1173.212211] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da87ec8a-e691-4d29-916d-ae8cae80ce55 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.220031] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788bd6f3-eb8a-4711-b19c-789bc252f9ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.249690] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fe8a87-b8f8-42ad-a652-208ea13aabdc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.257258] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a77432-d9ee-4432-b363-afca012a1fc3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.911013] env[61473]: DEBUG nova.compute.provider_tree [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.919835] env[61473]: DEBUG nova.scheduler.client.report [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.946520] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.175s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.947102] env[61473]: ERROR nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1173.947102] env[61473]: Faults: ['InvalidArgument'] [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Traceback (most recent call last): [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1173.947102] 
env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] self.driver.spawn(context, instance, image_meta, [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] self._fetch_image_if_missing(context, vi) [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] image_cache(vi, tmp_image_ds_loc) [ 1173.947102] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] vm_util.copy_virtual_disk( [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] session._wait_for_task(vmdk_copy_task) [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] return self.wait_for_task(task_ref) [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] return evt.wait() [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] result = hub.switch() [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] return self.greenlet.switch() [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1173.947426] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] self.f(*self.args, **self.kw) [ 1173.948049] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1173.948049] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] raise exceptions.translate_fault(task_info.error) [ 1173.948049] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1173.948049] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Faults: ['InvalidArgument'] [ 1173.948049] env[61473]: ERROR nova.compute.manager [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] [ 1173.948049] env[61473]: DEBUG nova.compute.utils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1173.949396] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Build of instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 was re-scheduled: A specified parameter was not correct: fileType [ 1173.949396] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1173.949763] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1173.949937] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
[ 1173.950119] env[61473]: DEBUG nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1173.950282] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1174.446020] env[61473]: DEBUG nova.network.neutron [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1174.458365] env[61473]: INFO nova.compute.manager [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Took 0.51 seconds to deallocate network for instance.
[ 1174.605753] env[61473]: INFO nova.scheduler.client.report [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Deleted allocations for instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31
[ 1174.625732] env[61473]: DEBUG oslo_concurrency.lockutils [None req-84fe5482-cc17-42a3-af18-b82a19dea09e tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 488.066s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1174.626849] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 290.378s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1174.627077] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Acquiring lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1174.627289] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1174.627500] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1174.629609] env[61473]: INFO nova.compute.manager [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Terminating instance
[ 1174.631274] env[61473]: DEBUG nova.compute.manager [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1174.631468] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1174.631922] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9104a827-4428-47eb-bbe3-e5145e546827 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1174.641439] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195184ec-837d-46ce-831b-63b85aabe040 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1174.652547] env[61473]: DEBUG nova.compute.manager [None req-02a867fe-4b88-40d6-8150-7e8736df8e7d tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] [instance: 7bc8115c-903f-47f2-bf6a-a9272fe0f044] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1174.674765] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f24ff321-c4a3-4bd4-a60d-ac7c1d448e31 could not be found.
[ 1174.674968] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1174.675161] env[61473]: INFO nova.compute.manager [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1174.675409] env[61473]: DEBUG oslo.service.loopingcall [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1174.675624] env[61473]: DEBUG nova.compute.manager [-] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1174.675719] env[61473]: DEBUG nova.network.neutron [-] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1174.679586] env[61473]: DEBUG nova.compute.manager [None req-02a867fe-4b88-40d6-8150-7e8736df8e7d tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] [instance: 7bc8115c-903f-47f2-bf6a-a9272fe0f044] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}}
[ 1174.710527] env[61473]: DEBUG nova.network.neutron [-] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1174.712525] env[61473]: DEBUG oslo_concurrency.lockutils [None req-02a867fe-4b88-40d6-8150-7e8736df8e7d tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] Lock "7bc8115c-903f-47f2-bf6a-a9272fe0f044" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.911s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1174.721016] env[61473]: INFO nova.compute.manager [-] [instance: f24ff321-c4a3-4bd4-a60d-ac7c1d448e31] Took 0.05 seconds to deallocate network for instance.
[ 1174.727158] env[61473]: DEBUG nova.compute.manager [None req-33ed1588-1ada-46de-b776-567618bf47a3 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 651ebf44-9c99-41a3-b7fb-ab5914002e85] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1174.755092] env[61473]: DEBUG nova.compute.manager [None req-33ed1588-1ada-46de-b776-567618bf47a3 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 651ebf44-9c99-41a3-b7fb-ab5914002e85] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}}
[ 1174.779661] env[61473]: DEBUG oslo_concurrency.lockutils [None req-33ed1588-1ada-46de-b776-567618bf47a3 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "651ebf44-9c99-41a3-b7fb-ab5914002e85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 195.924s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1174.788198] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1174.840241] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d2a75d1-c1e1-401a-a7a6-0e8841904fd7 tempest-ServersTestFqdnHostnames-1796241987 tempest-ServersTestFqdnHostnames-1796241987-project-member] Lock "f24ff321-c4a3-4bd4-a60d-ac7c1d448e31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1174.849998] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1174.850270] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1174.851873] env[61473]: INFO nova.compute.claims [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1175.197104] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ab5fe9-fe7b-4ca8-b22a-11eeedf1ae1f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.207782] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aeb738e-3094-4e72-accd-b8b110d9586c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.238114] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f50b07-a9a8-4892-a17e-6471f3fa2f3d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.245481] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06085fc-076d-42c4-bb8e-0492671f527a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.259708] env[61473]: DEBUG nova.compute.provider_tree [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1175.268477] env[61473]: DEBUG nova.scheduler.client.report [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1175.286497] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.436s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1175.286898] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}}
[ 1175.323393] env[61473]: DEBUG nova.compute.utils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1175.324776] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 1175.324950] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1175.335758] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}}
[ 1175.372492] env[61473]: INFO nova.virt.block_device [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Booting with volume 2a5154ef-2d1c-4570-a2b6-1abd76c14e9c at /dev/sda
[ 1175.402050] env[61473]: DEBUG nova.policy [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6dc267a7c794602952d2212e6f472f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f30e1d6a377748af8ec86289182876a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1175.423714] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e8b18a8-52a6-492f-bc33-d7222e783214 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.431455] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b3cbc8-e3bb-44fb-ac19-15fde44d5c3a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.464188] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb749795-eed6-4c5b-a04c-43e44509d344 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.472028] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8355040c-2f52-4146-9f4c-eeefb768102a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.499472] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730152b3-12b3-4219-9b40-9aafb06502a8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.506034] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dab8a8-2b34-41ee-ad4f-3143ad911e1a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.518518] env[61473]: DEBUG nova.virt.block_device [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Updating existing volume attachment record: 2cd48526-9f61-4a38-b9ca-84e068ae54c2 {{(pid=61473) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}}
[ 1175.755023] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}}
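Editorial note: the inventory record logged at [ 1175.268477] is what the claim at [ 1174.851873] draws against. Under the usual placement semantics, consumable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A quick check against the logged numbers:

    # Capacity check for the inventory dict logged above; the formula
    # (total - reserved) * allocation_ratio is the standard placement
    # reading of these fields.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:.0f} consumable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 329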
[ 1175.755594] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1175.755814] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1175.755979] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1175.756179] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1175.756335] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1175.756470] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1175.756678] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1175.756839] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1175.757013] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1175.757364] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1175.757533] env[61473]: DEBUG nova.virt.hardware [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1175.758715] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f655d8-79c6-4dc7-afb7-bac897c7e4dc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.769967] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47540ccf-60c9-4745-b113-a4707f2f675f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1175.788880] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "21e47c1d-d2be-427c-8b09-4e8da3df126b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1175.789127] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1175.886934] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Successfully created port: 7b6e63a5-0428-4761-b61e-0886343f930d {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1176.500501] env[61473]: DEBUG nova.compute.manager [req-3dec06c9-a56b-4647-b0e2-331eec0a551a req-b76965c4-2779-4ab3-88ca-938f89a5b189 service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Received event network-vif-plugged-7b6e63a5-0428-4761-b61e-0886343f930d {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1176.500809] env[61473]: DEBUG oslo_concurrency.lockutils [req-3dec06c9-a56b-4647-b0e2-331eec0a551a req-b76965c4-2779-4ab3-88ca-938f89a5b189 service nova] Acquiring lock "284671f0-2679-4344-86fa-4ea0f05f09bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
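Editorial note: the hardware lines above walk the standard topology selection: with no flavor or image constraints (the 0:0:0 limits fall back to 65536 each), a 1-vCPU m1.nano admits only the 1:1:1 topology. A simplified, factorization-based enumeration that reproduces the "Got 1 possible topologies" result; it is not Nova's actual code:

    # Enumerate (sockets, cores, threads) splits of a vCPU count under
    # upper limits, mirroring the selection logged above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log
    print(possible_topologies(4))   # six factorizations, for comparison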
[ 1176.500933] env[61473]: DEBUG oslo_concurrency.lockutils [req-3dec06c9-a56b-4647-b0e2-331eec0a551a req-b76965c4-2779-4ab3-88ca-938f89a5b189 service nova] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1176.501109] env[61473]: DEBUG oslo_concurrency.lockutils [req-3dec06c9-a56b-4647-b0e2-331eec0a551a req-b76965c4-2779-4ab3-88ca-938f89a5b189 service nova] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1176.501276] env[61473]: DEBUG nova.compute.manager [req-3dec06c9-a56b-4647-b0e2-331eec0a551a req-b76965c4-2779-4ab3-88ca-938f89a5b189 service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] No waiting events found dispatching network-vif-plugged-7b6e63a5-0428-4761-b61e-0886343f930d {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1176.501434] env[61473]: WARNING nova.compute.manager [req-3dec06c9-a56b-4647-b0e2-331eec0a551a req-b76965c4-2779-4ab3-88ca-938f89a5b189 service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Received unexpected event network-vif-plugged-7b6e63a5-0428-4761-b61e-0886343f930d for instance with vm_state building and task_state spawning.
[ 1176.578260] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Successfully updated port: 7b6e63a5-0428-4761-b61e-0886343f930d {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1176.593025] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Acquiring lock "refresh_cache-284671f0-2679-4344-86fa-4ea0f05f09bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1176.593118] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Acquired lock "refresh_cache-284671f0-2679-4344-86fa-4ea0f05f09bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1176.593254] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1176.648373] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1176.844567] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Updating instance_info_cache with network_info: [{"id": "7b6e63a5-0428-4761-b61e-0886343f930d", "address": "fa:16:3e:79:da:3e", "network": {"id": "f4e8ba23-427c-43ce-8c81-084e688d6b91", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-539903186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f30e1d6a377748af8ec86289182876a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6e63a5-04", "ovs_interfaceid": "7b6e63a5-0428-4761-b61e-0886343f930d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1176.855622] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Releasing lock "refresh_cache-284671f0-2679-4344-86fa-4ea0f05f09bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1176.856255] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Instance network_info: |[{"id": "7b6e63a5-0428-4761-b61e-0886343f930d", "address": "fa:16:3e:79:da:3e", "network": {"id": "f4e8ba23-427c-43ce-8c81-084e688d6b91", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-539903186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f30e1d6a377748af8ec86289182876a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6e63a5-04", "ovs_interfaceid": "7b6e63a5-0428-4761-b61e-0886343f930d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}}
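Editorial note: the network_info blob logged above is a JSON-serializable list of VIF dicts. Pulling out the fields the rest of the log keys on (port id, MAC, fixed IP, devname) is a short traversal; this is a reading aid for the structure, not Nova's model classes:

    import json

    # Abbreviated copy of the VIF entry logged above.
    network_info = json.loads('''[{
      "id": "7b6e63a5-0428-4761-b61e-0886343f930d",
      "address": "fa:16:3e:79:da:3e",
      "devname": "tap7b6e63a5-04",
      "network": {"id": "f4e8ba23-427c-43ce-8c81-084e688d6b91",
                  "bridge": "br-int",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.12",
                                        "type": "fixed"}]}]}}]''')

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["id"], vif["address"], vif["devname"], ips)
    # 7b6e63a5-... fa:16:3e:79:da:3e tap7b6e63a5-04 ['192.168.128.12']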
[ 1176.857161] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:da:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b6e63a5-0428-4761-b61e-0886343f930d', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1176.866072] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Creating folder: Project (f30e1d6a377748af8ec86289182876a3). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1176.867104] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17fd5d35-8cf0-41d9-953b-bb14d0c1e231 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.882915] env[61473]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error.
[ 1176.882915] env[61473]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61473) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}}
[ 1176.882915] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Folder already exists: Project (f30e1d6a377748af8ec86289182876a3). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 1176.882915] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Creating folder: Instances. Parent ref: group-v843533. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1176.884021] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2728a94f-4303-47e5-8c35-8db84fea9858 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.893430] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Created folder: Instances in parent group-v843533.
[ 1176.893846] env[61473]: DEBUG oslo.service.loopingcall [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1176.894198] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1176.896024] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ea5a646-8e65-40ff-a77a-f24d409839ba {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.914594] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1176.914594] env[61473]:     value = "task-4281615"
[ 1176.914594] env[61473]:     _type = "Task"
[ 1176.914594] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1176.924606] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281615, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1177.241376] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Acquiring lock "284671f0-2679-4344-86fa-4ea0f05f09bb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1177.425765] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281615, 'name': CreateVM_Task, 'duration_secs': 0.304047} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1177.426035] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1177.426953] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'attachment_id': '2cd48526-9f61-4a38-b9ca-84e068ae54c2', 'guest_format': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843536', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'name': 'volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '284671f0-2679-4344-86fa-4ea0f05f09bb', 'attached_at': '', 'detached_at': '', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'serial': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c'}, 'device_type': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=61473) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}}
[ 1177.427254] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Root volume attach. Driver type: vmdk {{(pid=61473) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}}
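Editorial note: the suds WARNING and the DuplicateName fault at [ 1176.882915] are a normal path here, not a failure: the driver attempts CreateFolder unconditionally and treats "already exists" as success, which avoids a check-then-create race against concurrent builds. A sketch of that create-or-reuse pattern with stand-in types:

    class DuplicateName(Exception):
        """Stand-in for the vSphere DuplicateName fault."""

    def create_folder(invoke_create, name, parent, log):
        # Try to create first; a DuplicateName fault means an earlier or
        # concurrent request already made the folder, so reuse it.
        try:
            folder = invoke_create(name, parent)
            log(f"Created folder: {name} in parent {parent}.")
        except DuplicateName:
            log(f"Folder already exists: {name}. Parent ref: {parent}.")
            folder = f"{parent}/{name}"   # illustrative lookup result
        return folder

    def always_duplicate(name, parent):
        raise DuplicateName(name)

    create_folder(always_duplicate,
                  "Project (f30e1d6a377748af8ec86289182876a3)",
                  "group-v843485", print)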
[ 1177.428298] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b5b7d7-fa25-4f6d-af68-411ada7f762a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.435411] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e85a33-1b52-4873-ae0e-a132497bab99 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.442827] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c27ebda-aa63-4634-ade6-dfef9545de5a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.448444] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2804d656-5594-443e-b0bf-f5c9d1d38c67 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.455810] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){
[ 1177.455810] env[61473]:     value = "task-4281616"
[ 1177.455810] env[61473]:     _type = "Task"
[ 1177.455810] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1177.463298] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281616, 'name': RelocateVM_Task} progress is 5%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1177.965614] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281616, 'name': RelocateVM_Task, 'duration_secs': 0.359078} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1177.966016] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Volume attach. Driver type: vmdk {{(pid=61473) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}}
[ 1177.966079] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843536', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'name': 'volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '284671f0-2679-4344-86fa-4ea0f05f09bb', 'attached_at': '', 'detached_at': '', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'serial': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c'} {{(pid=61473) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}}
[ 1177.966827] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb983c8f-fea5-46ce-ba29-ac8b2c6c558f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.981787] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df332df2-b012-4988-b2d4-2f9ea841fa7c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.003133] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c/volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c.vmdk or device None with type thin {{(pid=61473) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1178.003356] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98b96fda-c9a5-4fa5-95f6-3862c701b795 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.022398] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){
[ 1178.022398] env[61473]:     value = "task-4281617"
[ 1178.022398] env[61473]:     _type = "Task"
[ 1178.022398] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1178.029791] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281617, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1178.532813] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281617, 'name': ReconfigVM_Task, 'duration_secs': 0.247169} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
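Editorial note: the attach target in the Reconfig entries relates directly to the connection_info logged at [ 1177.966079]: the volume name from Cinder becomes a vSphere datastore path of the form "[datastore] dir/file.vmdk". The path-building helper below is illustrative, not the driver's actual code:

    # How the "[datastore2] volume-.../volume-....vmdk" attach target is
    # derived from the connection_info dict logged above.
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-843536',
            'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c',
            'name': 'volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c',
            'access_mode': 'rw',
        },
    }

    def vmdk_path(datastore, volume_name):
        # "[datastore] dir/file.vmdk" is the vSphere datastore-path syntax.
        return f"[{datastore}] {volume_name}/{volume_name}.vmdk"

    name = connection_info['data']['name']
    print(vmdk_path('datastore2', name))
    # [datastore2] volume-2a5154ef-.../volume-2a5154ef-....vmdk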
[ 1178.537127] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Reconfigured VM instance instance-00000037 to attach disk [datastore2] volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c/volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c.vmdk or device None with type thin {{(pid=61473) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1178.541915] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dca17d6e-82ad-4934-b005-530d0cc5925f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.563111] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){
[ 1178.563111] env[61473]:     value = "task-4281618"
[ 1178.563111] env[61473]:     _type = "Task"
[ 1178.563111] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1178.576667] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281618, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1178.768359] env[61473]: DEBUG nova.compute.manager [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Received event network-changed-7b6e63a5-0428-4761-b61e-0886343f930d {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1178.768627] env[61473]: DEBUG nova.compute.manager [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Refreshing instance network info cache due to event network-changed-7b6e63a5-0428-4761-b61e-0886343f930d. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}}
[ 1178.768913] env[61473]: DEBUG oslo_concurrency.lockutils [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] Acquiring lock "refresh_cache-284671f0-2679-4344-86fa-4ea0f05f09bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1178.769138] env[61473]: DEBUG oslo_concurrency.lockutils [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] Acquired lock "refresh_cache-284671f0-2679-4344-86fa-4ea0f05f09bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1178.769370] env[61473]: DEBUG nova.network.neutron [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Refreshing network info cache for port 7b6e63a5-0428-4761-b61e-0886343f930d {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1179.028700] env[61473]: DEBUG nova.network.neutron [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Updated VIF entry in instance network info cache for port 7b6e63a5-0428-4761-b61e-0886343f930d. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1179.029074] env[61473]: DEBUG nova.network.neutron [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Updating instance_info_cache with network_info: [{"id": "7b6e63a5-0428-4761-b61e-0886343f930d", "address": "fa:16:3e:79:da:3e", "network": {"id": "f4e8ba23-427c-43ce-8c81-084e688d6b91", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-539903186-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f30e1d6a377748af8ec86289182876a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6e63a5-04", "ovs_interfaceid": "7b6e63a5-0428-4761-b61e-0886343f930d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1179.038647] env[61473]: DEBUG oslo_concurrency.lockutils [req-f6d88e45-154a-416f-9c5f-254500f18b5e req-ffee6e08-d176-4bc3-aa7e-aa923558a25f service nova] Releasing lock "refresh_cache-284671f0-2679-4344-86fa-4ea0f05f09bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1179.073432] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281618, 'name': ReconfigVM_Task, 'duration_secs': 0.114956} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
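Editorial note: the network-changed event at [ 1178.768359] triggers a refresh of the instance's network info cache, serialized by the "refresh_cache-<uuid>" lock so that event handlers and the spawn path don't interleave cache writes. A sketch of that per-instance named-lock pattern; it uses plain threading locks, whereas Nova uses oslo.concurrency's named locks:

    import threading
    from collections import defaultdict

    # One lock per instance UUID, mirroring the "refresh_cache-<uuid>"
    # lock names in the log. defaultdict is a simplification; oslo's
    # lockutils manages named-lock lifetime for real services.
    _cache_locks = defaultdict(threading.Lock)
    _nw_cache = {}

    def refresh_cache(uuid, fetch_network_info, log):
        lock = _cache_locks[f"refresh_cache-{uuid}"]
        log(f'Acquiring lock "refresh_cache-{uuid}"')
        with lock:
            log(f'Acquired lock "refresh_cache-{uuid}"')
            _nw_cache[uuid] = fetch_network_info(uuid)
        log(f'Releasing lock "refresh_cache-{uuid}"')

    refresh_cache("284671f0-2679-4344-86fa-4ea0f05f09bb",
                  fetch_network_info=lambda u: [],   # placeholder fetch
                  log=print)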
[ 1179.073770] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843536', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'name': 'volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '284671f0-2679-4344-86fa-4ea0f05f09bb', 'attached_at': '', 'detached_at': '', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'serial': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c'} {{(pid=61473) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}}
[ 1179.074570] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c150b2d1-41b6-4b72-9551-78ae733ae28c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1179.080284] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){
[ 1179.080284] env[61473]:     value = "task-4281619"
[ 1179.080284] env[61473]:     _type = "Task"
[ 1179.080284] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1179.089459] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281619, 'name': Rename_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1179.589612] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281619, 'name': Rename_Task, 'duration_secs': 0.129327} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1179.589934] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Powering on the VM {{(pid=61473) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}}
[ 1179.590929] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ccc068d-4580-4213-9275-27702d1c272a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1179.596329] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){
[ 1179.596329] env[61473]:     value = "task-4281620"
[ 1179.596329] env[61473]:     _type = "Task"
[ 1179.596329] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1179.603412] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1180.106196] env[61473]: DEBUG oslo_vmware.api [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281620, 'name': PowerOnVM_Task, 'duration_secs': 0.433658} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1180.106496] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Powered on the VM {{(pid=61473) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}}
[ 1180.106642] env[61473]: INFO nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Took 4.35 seconds to spawn the instance on the hypervisor.
[ 1180.106818] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Checking state {{(pid=61473) _get_power_state /opt/stack/nova/nova/compute/manager.py:1787}} [ 1180.107568] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e515e88-9c46-4e8c-b80c-339dbb66fab5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.150416] env[61473]: DEBUG nova.compute.utils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Conflict updating instance 284671f0-2679-4344-86fa-4ea0f05f09bb. Expected: {'task_state': ['spawning']}. Actual: {'task_state': 'deleting'} {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1180.152145] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Instance disappeared during build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2509}} [ 1180.152324] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1180.152543] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1180.152760] env[61473]: DEBUG nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1180.152945] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1180.498132] env[61473]: DEBUG nova.network.neutron [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.513036] env[61473]: INFO nova.compute.manager [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Took 0.36 seconds to deallocate network for instance. [ 1180.590284] env[61473]: INFO nova.scheduler.client.report [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Deleted allocations for instance 284671f0-2679-4344-86fa-4ea0f05f09bb [ 1180.590284] env[61473]: DEBUG oslo_concurrency.lockutils [None req-da3ca978-14f0-4904-9cd9-efcc893ad60d tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.106s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.590441] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 3.349s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.590783] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Acquiring lock "284671f0-2679-4344-86fa-4ea0f05f09bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.591354] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.591865] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.593972] env[61473]: INFO nova.compute.manager [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Terminating instance [ 1180.596055] env[61473]: DEBUG nova.compute.manager [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1180.596271] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Powering off the VM {{(pid=61473) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1180.596509] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ffac6aa-fc98-4a8a-9c96-f5b2cc08936a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.600831] env[61473]: DEBUG nova.compute.manager [None req-3d14ec99-cc44-46c8-a08f-f5aa2d9eb390 tempest-ServerActionsTestOtherA-1494680182 tempest-ServerActionsTestOtherA-1494680182-project-member] [instance: 55ef17e9-54f8-429e-91bb-22a9be430200] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1180.609286] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){ [ 1180.609286] env[61473]: value = "task-4281621" [ 1180.609286] env[61473]: _type = "Task" [ 1180.609286] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.618533] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.636379] env[61473]: DEBUG nova.compute.manager [None req-3d14ec99-cc44-46c8-a08f-f5aa2d9eb390 tempest-ServerActionsTestOtherA-1494680182 tempest-ServerActionsTestOtherA-1494680182-project-member] [instance: 55ef17e9-54f8-429e-91bb-22a9be430200] Instance disappeared before build.
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1180.670319] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3d14ec99-cc44-46c8-a08f-f5aa2d9eb390 tempest-ServerActionsTestOtherA-1494680182 tempest-ServerActionsTestOtherA-1494680182-project-member] Lock "55ef17e9-54f8-429e-91bb-22a9be430200" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 195.847s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.685755] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1180.750996] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.751332] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.752747] env[61473]: INFO nova.compute.claims [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.796594] env[61473]: DEBUG nova.compute.manager [req-e20bcc8c-3826-42e5-b3e6-ba677fe909c5 req-3bdb4957-1369-4069-b8a1-cee39e0bd6cf service nova] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Received event network-vif-deleted-7b6e63a5-0428-4761-b61e-0886343f930d {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1181.092979] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bd4ebc-4b6c-46fc-8f64-98f71bbe5e67 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.101018] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4627c7a-a247-4c86-8fdb-ef764b853627 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.132527] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c12818-5b52-45ee-bfdc-3c3036122805 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.139924] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281621, 'name': PowerOffVM_Task, 'duration_secs': 0.191648} completed successfully.
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.142054] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Powered off the VM {{(pid=61473) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1181.142054] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Volume detach. Driver type: vmdk {{(pid=61473) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1181.142253] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843536', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'name': 'volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '284671f0-2679-4344-86fa-4ea0f05f09bb', 'attached_at': '', 'detached_at': '', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'serial': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c'} {{(pid=61473) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1181.143196] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b920eac-c257-4688-9e4e-cbd350ee88de {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.146359] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb299129-2f49-4fa1-8774-0844e1b0f67e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.159717] env[61473]: DEBUG nova.compute.provider_tree [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.175051] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766557fe-80f9-4d3b-b61c-e0e5ddfe19c1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.178122] env[61473]: DEBUG nova.scheduler.client.report [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1181.186159] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b52e0f-08c3-4e8e-8988-0764d22ec314 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.206938] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.456s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.207537] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1181.210736] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8bbbda-75c3-4b87-8070-0ae7c3dfa801 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.229485] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] The volume has not been displaced from its original location: [datastore2] volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c/volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c.vmdk. No consolidation needed. {{(pid=61473) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1181.234802] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=61473) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1181.235391] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eafe12af-cbc8-4ade-ad42-5b4628b8698f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.249888] env[61473]: DEBUG nova.compute.utils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1181.254082] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Allocating IP information in the background. 
{{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1181.254082] env[61473]: DEBUG nova.network.neutron [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1181.258172] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){ [ 1181.258172] env[61473]: value = "task-4281622" [ 1181.258172] env[61473]: _type = "Task" [ 1181.258172] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.265351] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1181.268269] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281622, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.333742] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1181.340896] env[61473]: DEBUG nova.policy [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba6dfa5ca0c74d02bc8a4951e3dfc911', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90e2c696fdd143598a730850ede006f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1181.362169] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:15:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='aa2cbf43-1241-4465-a5a7-d697a8701e3e',id=38,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-57095698',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1181.362423] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1181.362580] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1181.362809] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1181.363037] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1181.363147] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1181.363353] env[61473]: DEBUG 
nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1181.363509] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1181.363674] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1181.363839] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1181.364022] env[61473]: DEBUG nova.virt.hardware [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1181.364921] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc147b5-f101-4bb8-80aa-b1c53f28a854 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.373465] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5effd3cf-e1ea-47bd-ba20-2331544354e3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.744373] env[61473]: DEBUG nova.network.neutron [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Successfully created port: 733d4eab-f2ad-4b82-ab7d-6c3075330b30 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1181.768754] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281622, 'name': ReconfigVM_Task, 'duration_secs': 0.153269} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.769065] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=61473) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1181.774496] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a95783d9-5fdd-425b-ab29-85f56bea8ebd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.789986] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){ [ 1181.789986] env[61473]: value = "task-4281623" [ 1181.789986] env[61473]: _type = "Task" [ 1181.789986] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.799110] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281623, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.307888] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281623, 'name': ReconfigVM_Task, 'duration_secs': 0.113895} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.308698] env[61473]: DEBUG nova.virt.vmwareapi.volumeops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-843536', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'name': 'volume-2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '284671f0-2679-4344-86fa-4ea0f05f09bb', 'attached_at': '', 'detached_at': '', 'volume_id': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c', 'serial': '2a5154ef-2d1c-4570-a2b6-1abd76c14e9c'} {{(pid=61473) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1182.308955] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1182.310028] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b901b59-9441-4a74-9693-242a91d8c12b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.319390] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1182.319644] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5aa4bbf0-6b13-4eb2-b7a3-a6682af4cfa5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.381216] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1182.381438] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1182.381615] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Deleting the datastore file [datastore2] 284671f0-2679-4344-86fa-4ea0f05f09bb {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1182.381874] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c9325a0-b465-4f0b-88a7-bfb288165425 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.389103] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for the task: (returnval){ [ 1182.389103] env[61473]: value = "task-4281625" [ 1182.389103] env[61473]: _type = "Task" [ 1182.389103] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.397310] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.551215] env[61473]: DEBUG nova.network.neutron [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Successfully updated port: 733d4eab-f2ad-4b82-ab7d-6c3075330b30 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1182.562424] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "refresh_cache-02a53e4f-55aa-4d13-8f74-13ddfe37fae4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.562577] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired lock "refresh_cache-02a53e4f-55aa-4d13-8f74-13ddfe37fae4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.562814] env[61473]: DEBUG nova.network.neutron [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1182.634083] env[61473]: DEBUG nova.network.neutron [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1182.839797] env[61473]: DEBUG nova.compute.manager [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Received event network-vif-plugged-733d4eab-f2ad-4b82-ab7d-6c3075330b30 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1182.839797] env[61473]: DEBUG oslo_concurrency.lockutils [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] Acquiring lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.839797] env[61473]: DEBUG oslo_concurrency.lockutils [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.839797] env[61473]: DEBUG oslo_concurrency.lockutils [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.839928] env[61473]: DEBUG nova.compute.manager [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] No waiting events found dispatching network-vif-plugged-733d4eab-f2ad-4b82-ab7d-6c3075330b30 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1182.839928] env[61473]: WARNING nova.compute.manager [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Received unexpected event network-vif-plugged-733d4eab-f2ad-4b82-ab7d-6c3075330b30 for instance with vm_state building and task_state spawning. [ 1182.839928] env[61473]: DEBUG nova.compute.manager [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Received event network-changed-733d4eab-f2ad-4b82-ab7d-6c3075330b30 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1182.839928] env[61473]: DEBUG nova.compute.manager [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Refreshing instance network info cache due to event network-changed-733d4eab-f2ad-4b82-ab7d-6c3075330b30.
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1182.839928] env[61473]: DEBUG oslo_concurrency.lockutils [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] Acquiring lock "refresh_cache-02a53e4f-55aa-4d13-8f74-13ddfe37fae4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.882946] env[61473]: DEBUG nova.network.neutron [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Updating instance_info_cache with network_info: [{"id": "733d4eab-f2ad-4b82-ab7d-6c3075330b30", "address": "fa:16:3e:ec:87:c3", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap733d4eab-f2", "ovs_interfaceid": "733d4eab-f2ad-4b82-ab7d-6c3075330b30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.899579] env[61473]: DEBUG oslo_vmware.api [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Task: {'id': task-4281625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084157} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.899697] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1182.899871] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1182.900071] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1182.900254] env[61473]: INFO nova.compute.manager [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Took 2.30 seconds to destroy the instance on the hypervisor. [ 1182.900501] env[61473]: DEBUG oslo.service.loopingcall [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1182.900687] env[61473]: DEBUG nova.compute.manager [-] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1182.900779] env[61473]: DEBUG nova.network.neutron [-] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1182.903937] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Releasing lock "refresh_cache-02a53e4f-55aa-4d13-8f74-13ddfe37fae4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.904252] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Instance network_info: |[{"id": "733d4eab-f2ad-4b82-ab7d-6c3075330b30", "address": "fa:16:3e:ec:87:c3", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap733d4eab-f2", "ovs_interfaceid": "733d4eab-f2ad-4b82-ab7d-6c3075330b30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1182.904533] env[61473]: DEBUG oslo_concurrency.lockutils [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] Acquired lock "refresh_cache-02a53e4f-55aa-4d13-8f74-13ddfe37fae4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.904809] env[61473]: DEBUG nova.network.neutron [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Refreshing network info cache for port 733d4eab-f2ad-4b82-ab7d-6c3075330b30 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1182.905930] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:87:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '733d4eab-f2ad-4b82-ab7d-6c3075330b30', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1182.913719] env[61473]: DEBUG oslo.service.loopingcall [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1182.915018] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1182.917285] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6cede448-dd24-472a-8de4-486e2ae921ef {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.941348] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1182.941348] env[61473]: value = "task-4281626" [ 1182.941348] env[61473]: _type = "Task" [ 1182.941348] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.942485] env[61473]: DEBUG nova.network.neutron [-] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.953698] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281626, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.955248] env[61473]: INFO nova.compute.manager [-] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Took 0.05 seconds to deallocate network for instance. [ 1183.027982] env[61473]: INFO nova.compute.manager [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Took 0.07 seconds to detach 1 volumes for instance. 
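The instance_info_cache entries above log the full network_info structure as JSON. A short, self-contained sketch of how the interesting fields (port ID, MAC, fixed IPs) sit inside it; the dict is abbreviated from the log entry above to just the keys the loop touches:

    # Walking the network_info structure logged by update_instance_cache_with_nw_info.
    network_info = [{
        "id": "733d4eab-f2ad-4b82-ab7d-6c3075330b30",
        "address": "fa:16:3e:ec:87:c3",
        "network": {
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.125", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        # Collect every fixed IP across all subnets of this VIF.
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed)
        # -> 733d4eab-f2ad-4b82-ab7d-6c3075330b30 fa:16:3e:ec:87:c3 ['192.168.233.125']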
[ 1183.031582] env[61473]: DEBUG nova.compute.manager [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Deleting volume: 2a5154ef-2d1c-4570-a2b6-1abd76c14e9c {{(pid=61473) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3243}} [ 1183.126748] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.127155] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.127489] env[61473]: DEBUG nova.objects.instance [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lazy-loading 'resources' on Instance uuid 284671f0-2679-4344-86fa-4ea0f05f09bb {{(pid=61473) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.453172] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281626, 'name': CreateVM_Task, 'duration_secs': 0.301987} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.454485] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1183.454485] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.454485] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.454646] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1183.458017] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db112deb-57a7-4955-b161-95b2e185391f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
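The "Acquiring lock ... / Lock ... acquired :: waited / Lock ... released :: held" triples around "compute_resources" above are emitted by oslo.concurrency's synchronized wrapper. A minimal sketch of both spellings of that pattern; update_usage here is a stand-in function for illustration, not Nova's method:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped body runs with the named in-process lock
    # held, and lockutils logs the acquire/release lines with durations.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass

    update_usage()

    # Context-manager form of the same named lock.
    with lockutils.lock('compute_resources'):
        pass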
[ 1183.459530] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){ [ 1183.459530] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]527752f6-ce4a-f56a-3dfe-9b354309d423" [ 1183.459530] env[61473]: _type = "Task" [ 1183.459530] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.467966] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]527752f6-ce4a-f56a-3dfe-9b354309d423, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.512593] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f00cda5-008a-4a0b-bd5e-529f5a230deb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.523071] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17eeaf1f-072b-46ad-a10f-18cef6f05755 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.553895] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a44842f-d4ab-4a6a-9818-1ac9a0e3c20d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.563293] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea7da91-52bc-4f68-a3a9-153998d6a0eb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.566476] env[61473]: DEBUG nova.network.neutron [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Updated VIF entry in instance network info cache for port 733d4eab-f2ad-4b82-ab7d-6c3075330b30. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1183.567010] env[61473]: DEBUG nova.network.neutron [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Updating instance_info_cache with network_info: [{"id": "733d4eab-f2ad-4b82-ab7d-6c3075330b30", "address": "fa:16:3e:ec:87:c3", "network": {"id": "3656c95e-caa4-4b60-a475-15dc98f6a5b6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.125", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ceda3158edb84e739c8c130271e8cb2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap733d4eab-f2", "ovs_interfaceid": "733d4eab-f2ad-4b82-ab7d-6c3075330b30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.575831] env[61473]: DEBUG nova.compute.provider_tree [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.579047] env[61473]: DEBUG oslo_concurrency.lockutils [req-0d4fc7c1-59c0-45f5-84fe-15485890baa8 req-dfb35c3d-5ab0-4ac9-9916-6ba6bb3d32b3 service nova] Releasing lock "refresh_cache-02a53e4f-55aa-4d13-8f74-13ddfe37fae4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.585922] env[61473]: DEBUG nova.scheduler.client.report [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1183.603217] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.476s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.698278] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-1779e3af-3f53-4136-82df-84f224344aee tempest-ServersTestBootFromVolume-1251734831 tempest-ServersTestBootFromVolume-1251734831-project-member] Lock "284671f0-2679-4344-86fa-4ea0f05f09bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.108s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.970327] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.970606] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1183.970819] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.060421] env[61473]: DEBUG oslo_concurrency.lockutils [None req-411606f2-4226-45b1-8326-2edfcd455c2d tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "271c4f52-31ac-43ea-9bfb-5adf561684c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.061163] env[61473]: DEBUG oslo_concurrency.lockutils [None req-411606f2-4226-45b1-8326-2edfcd455c2d tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "271c4f52-31ac-43ea-9bfb-5adf561684c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.656038] env[61473]: WARNING oslo_vmware.rw_handles [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 
1221.656038] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1221.656038] env[61473]: ERROR oslo_vmware.rw_handles [ 1221.656713] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1221.658392] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1221.658444] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Copying Virtual Disk [datastore2] vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/5f5b7120-d6b6-4454-9836-d9042f265fef/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1221.658730] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a56057e-123e-4bb6-82f4-35c4480f23ec {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.666832] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 1221.666832] env[61473]: value = "task-4281628" [ 1221.666832] env[61473]: _type = "Task" [ 1221.666832] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.674700] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': task-4281628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.176826] env[61473]: DEBUG oslo_vmware.exceptions [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1222.178054] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.178054] env[61473]: ERROR nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1222.178054] env[61473]: Faults: ['InvalidArgument'] [ 1222.178054] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] Traceback (most recent call last): [ 1222.178054] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1222.178054] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] yield resources [ 1222.178054] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1222.178054] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self.driver.spawn(context, instance, image_meta, [ 1222.178054] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1222.178054] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self._fetch_image_if_missing(context, vi) [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] image_cache(vi, tmp_image_ds_loc) [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] vm_util.copy_virtual_disk( [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] session._wait_for_task(vmdk_copy_task) [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] return self.wait_for_task(task_ref) [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] return evt.wait() [ 1222.178641] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] result = hub.switch() [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] return self.greenlet.switch() [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self.f(*self.args, **self.kw) [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] raise exceptions.translate_fault(task_info.error) [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] Faults: ['InvalidArgument'] [ 1222.179146] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] [ 1222.179146] env[61473]: INFO nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Terminating instance [ 1222.179689] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.179897] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1222.180156] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71e77254-c8cc-49e2-be6c-0c34df80a983 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.182579] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1222.182579] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1222.183389] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb92475d-a53e-4ea8-9f94-48a4ed0fc754 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.190435] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1222.190654] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b789f8b8-53e3-4d75-ad53-5666396c96fc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.192913] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1222.193109] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1222.194101] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-010283f3-78f4-4c51-afca-8287c5c249d8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.198713] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Waiting for the task: (returnval){ [ 1222.198713] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52f613f2-0499-dd7f-f695-befc84159071" [ 1222.198713] env[61473]: _type = "Task" [ 1222.198713] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.206199] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52f613f2-0499-dd7f-f695-befc84159071, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.267340] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1222.267574] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1222.267755] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Deleting the datastore file [datastore2] c6880758-25cf-4078-9455-827db6fb6435 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.268031] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1bddb8c-7bf9-4c04-af70-5219e693238b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.274346] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for the task: (returnval){ [ 1222.274346] env[61473]: value = "task-4281630" [ 1222.274346] env[61473]: _type = "Task" [ 1222.274346] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.282238] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': task-4281630, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.709218] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1222.709512] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Creating directory with path [datastore2] vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1222.709682] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe4d5c07-1e76-4ac4-b067-a92655bf6eb0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.721505] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Created directory with path [datastore2] vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1222.721712] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Fetch image to [datastore2] vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1222.721876] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1222.722635] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda05bda-77dc-4a36-add5-0e471bc289c5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.729384] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93be98c1-2430-4f13-a083-86d4a5a69d04 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.738488] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6b1611-1fbf-4c93-b618-d5acaefc97d6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.769842] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9932f0-27e5-4db2-a2b9-c45548c93f45 {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.778613] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-77b473fb-38fa-4bc8-8640-3a006a6a8b8f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.784919] env[61473]: DEBUG oslo_vmware.api [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Task: {'id': task-4281630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072381} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.785179] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.785354] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1222.785535] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1222.785680] env[61473]: INFO nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Took 0.60 seconds to destroy the instance on the hypervisor. 
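The CopyVirtualDisk_Task failure and the DeleteDatastoreFile_Task polling above both go through oslo.vmware's generic task loop: invoke_api() starts an asynchronous vCenter task, wait_for_task() polls it (the "Waiting for the task" / "progress is 0%" entries), and raises a translated VimFaultException when task_info.error is set, which is the "A specified parameter was not correct: fileType" path seen here. A minimal sketch of that pattern, assuming a placeholder vCenter endpoint, credentials, and datastore path (not the Nova code itself):

    from oslo_vmware import api

    # Placeholder endpoint and credentials; the log's session targets a real vCenter.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Start an asynchronous vCenter task (here a datastore file delete, matching
    # the DeleteDatastoreFile_Task entries above).
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name='[datastore2] c6880758-25cf-4078-9455-827db6fb6435',
                              datacenter=None)  # a real call passes the Datacenter moref

    # Poll until completion; if task_info.error is set, this raises the
    # translated fault, e.g. VimFaultException(..., faults=['InvalidArgument']).
    session.wait_for_task(task)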
[ 1222.789476] env[61473]: DEBUG nova.compute.claims [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1222.789687] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.789956] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.801583] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1222.862103] env[61473]: DEBUG oslo_vmware.rw_handles [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1222.927692] env[61473]: DEBUG oslo_vmware.rw_handles [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1222.927892] env[61473]: DEBUG oslo_vmware.rw_handles [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1222.966386] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.179941] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b6d9c5-5066-4ede-aad4-8c8a9c34e9f6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.187631] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eba9d43-e84c-43b6-aea0-83f0b155e920 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.216946] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a81e484-1689-43a2-afcb-25123aed17ec {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.224209] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6aaa22-1f5f-41d9-8b71-abb0d2c96f64 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.237010] env[61473]: DEBUG nova.compute.provider_tree [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.245913] env[61473]: DEBUG nova.scheduler.client.report [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1223.261422] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.471s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.262042] env[61473]: ERROR nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1223.262042] env[61473]: Faults: ['InvalidArgument'] [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: 
c6880758-25cf-4078-9455-827db6fb6435] Traceback (most recent call last): [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self.driver.spawn(context, instance, image_meta, [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self._fetch_image_if_missing(context, vi) [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] image_cache(vi, tmp_image_ds_loc) [ 1223.262042] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] vm_util.copy_virtual_disk( [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] session._wait_for_task(vmdk_copy_task) [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] return self.wait_for_task(task_ref) [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] return evt.wait() [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] result = hub.switch() [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] return self.greenlet.switch() [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1223.262699] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] self.f(*self.args, **self.kw) [ 1223.263411] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1223.263411] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] raise exceptions.translate_fault(task_info.error) [ 1223.263411] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1223.263411] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] Faults: ['InvalidArgument'] [ 1223.263411] env[61473]: ERROR nova.compute.manager [instance: c6880758-25cf-4078-9455-827db6fb6435] [ 1223.263411] env[61473]: DEBUG nova.compute.utils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1223.265063] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Build of instance c6880758-25cf-4078-9455-827db6fb6435 was re-scheduled: A specified parameter was not correct: fileType [ 1223.265063] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1223.265561] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1223.265755] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1223.266024] env[61473]: DEBUG nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1223.266280] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1223.580956] env[61473]: DEBUG nova.network.neutron [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.594241] env[61473]: INFO nova.compute.manager [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Took 0.33 seconds to deallocate network for instance. [ 1223.695031] env[61473]: INFO nova.scheduler.client.report [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Deleted allocations for instance c6880758-25cf-4078-9455-827db6fb6435 [ 1223.714378] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c13521f1-d6b5-4982-8782-6529a8193e17 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "c6880758-25cf-4078-9455-827db6fb6435" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 536.670s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.715675] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "c6880758-25cf-4078-9455-827db6fb6435" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 338.951s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.715900] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Acquiring lock "c6880758-25cf-4078-9455-827db6fb6435-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.716123] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "c6880758-25cf-4078-9455-827db6fb6435-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.716293] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "c6880758-25cf-4078-9455-827db6fb6435-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.718368] env[61473]: INFO nova.compute.manager [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Terminating instance [ 1223.720109] env[61473]: DEBUG nova.compute.manager [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1223.720304] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1223.720821] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5942b7df-282a-4d2a-ab90-2810e659b863 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.730829] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c770adf-c1a1-4567-ac30-ac3a709544f7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.742680] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1223.763279] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6880758-25cf-4078-9455-827db6fb6435 could not be found. [ 1223.763495] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1223.763608] env[61473]: INFO nova.compute.manager [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] [instance: c6880758-25cf-4078-9455-827db6fb6435] Took 0.04 seconds to destroy the instance on the hypervisor. 
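Each Acquiring/acquired/released triple above (the instance lock, the "-events" lock, "compute_resources") is oslo.concurrency's lockutils instrumentation; the "waited"/"held" durations it logs are measured around the critical section. The pattern, sketched with an illustrative function body rather than the actual ComputeManager code:

    from oslo_concurrency import lockutils

    # Decorator form: serializes all callers sharing the lock name
    # (Nova uses the instance UUID, as in the lines above).
    @lockutils.synchronized('c6880758-25cf-4078-9455-827db6fb6435')
    def do_terminate_instance():
        pass  # destroy the VM, delete datastore files, deallocate network

    # Context-manager form, as used around resource-tracker updates;
    # the "held N.NNNs" figure covers exactly this block.
    with lockutils.lock('compute_resources'):
        pass  # claim/abort logic runs while the lock is held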
[ 1223.763988] env[61473]: DEBUG oslo.service.loopingcall [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1223.764221] env[61473]: DEBUG nova.compute.manager [-] [instance: c6880758-25cf-4078-9455-827db6fb6435] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1223.764303] env[61473]: DEBUG nova.network.neutron [-] [instance: c6880758-25cf-4078-9455-827db6fb6435] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1223.795232] env[61473]: DEBUG nova.network.neutron [-] [instance: c6880758-25cf-4078-9455-827db6fb6435] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.802372] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.802663] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.804764] env[61473]: INFO nova.compute.claims [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1223.808192] env[61473]: INFO nova.compute.manager [-] [instance: c6880758-25cf-4078-9455-827db6fb6435] Took 0.04 seconds to deallocate network for instance. 
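The "Claim successful" line means the requested flavor fit within the provider inventory that the scheduler report client logs repeatedly for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576. Placement-style usable capacity is (total - reserved) * allocation_ratio; a quick check of the numbers from the inventory dict in this log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 329.0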
[ 1223.906075] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56ad4804-175b-4f6f-8188-82f4c15c4011 tempest-ServersAdminTestJSON-290085192 tempest-ServersAdminTestJSON-290085192-project-member] Lock "c6880758-25cf-4078-9455-827db6fb6435" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.192482] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73212d0f-1595-4715-b5e7-73afb6a32f94 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.200358] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f54174d-fcc3-48d6-baf4-6144db7db305 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.232820] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ca3c90-f003-435a-9941-5da1e219fbe2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.239690] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1634c3-d210-46d2-9da9-a0512b43dae2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.253182] env[61473]: DEBUG nova.compute.provider_tree [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1224.263220] env[61473]: DEBUG nova.scheduler.client.report [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1224.277009] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.474s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.277527] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1224.312704] env[61473]: DEBUG nova.compute.utils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1224.314062] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1224.314287] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1224.322844] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1224.391441] env[61473]: DEBUG nova.policy [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a236d0d1c33545e29521043ba0a7ed7b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88c2f68ebf2a40179a0f1c9547dfffc6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1224.395136] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1224.423268] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1224.423508] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1224.423664] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1224.423862] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1224.424041] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1224.424199] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1224.424416] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1224.424576] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1224.424745] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1224.424908] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1224.425095] env[61473]: DEBUG nova.virt.hardware [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1224.425994] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7862a7-4c60-4f4d-862e-10f8f38c87f3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.434405] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e45b74f-a4f7-426a-8749-20c5fd411ec0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.463940] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.816547] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Successfully created port: fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1225.535679] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Successfully updated port: fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1225.546261] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "refresh_cache-d79207a6-43e0-474a-9c61-8a71a86da7a0" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.546414] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquired lock 
"refresh_cache-d79207a6-43e0-474a-9c61-8a71a86da7a0" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.546564] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1225.595103] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1225.623646] env[61473]: DEBUG nova.compute.manager [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Received event network-vif-plugged-fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1225.623877] env[61473]: DEBUG oslo_concurrency.lockutils [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] Acquiring lock "d79207a6-43e0-474a-9c61-8a71a86da7a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.624091] env[61473]: DEBUG oslo_concurrency.lockutils [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.624265] env[61473]: DEBUG oslo_concurrency.lockutils [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.624432] env[61473]: DEBUG nova.compute.manager [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] No waiting events found dispatching network-vif-plugged-fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1225.624591] env[61473]: WARNING nova.compute.manager [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Received unexpected event network-vif-plugged-fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9 for instance with vm_state building and task_state spawning. 
[ 1225.624747] env[61473]: DEBUG nova.compute.manager [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Received event network-changed-fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1225.625841] env[61473]: DEBUG nova.compute.manager [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Refreshing instance network info cache due to event network-changed-fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1225.625841] env[61473]: DEBUG oslo_concurrency.lockutils [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] Acquiring lock "refresh_cache-d79207a6-43e0-474a-9c61-8a71a86da7a0" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.816997] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Updating instance_info_cache with network_info: [{"id": "fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9", "address": "fa:16:3e:e8:40:33", "network": {"id": "6ee9e3ef-d656-4332-b329-bbc4e65d00ac", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-334877700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "88c2f68ebf2a40179a0f1c9547dfffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb64aecb-5f", "ovs_interfaceid": "fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.828370] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Releasing lock "refresh_cache-d79207a6-43e0-474a-9c61-8a71a86da7a0" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.828667] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Instance network_info: |[{"id": "fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9", "address": "fa:16:3e:e8:40:33", "network": {"id": "6ee9e3ef-d656-4332-b329-bbc4e65d00ac", "bridge": "br-int", "label": 
"tempest-ServersV294TestFqdnHostnames-334877700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "88c2f68ebf2a40179a0f1c9547dfffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb64aecb-5f", "ovs_interfaceid": "fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1225.828964] env[61473]: DEBUG oslo_concurrency.lockutils [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] Acquired lock "refresh_cache-d79207a6-43e0-474a-9c61-8a71a86da7a0" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.829162] env[61473]: DEBUG nova.network.neutron [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Refreshing network info cache for port fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1225.830253] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:40:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1225.838176] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Creating folder: Project (88c2f68ebf2a40179a0f1c9547dfffc6). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1225.838916] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a333c8ea-376d-4abc-b244-86b427072b2f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.852624] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Created folder: Project (88c2f68ebf2a40179a0f1c9547dfffc6) in parent group-v843485. 
[ 1225.852890] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Creating folder: Instances. Parent ref: group-v843554. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1225.853167] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-367201a1-58a9-4920-ab34-9a324de5355f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.861750] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Created folder: Instances in parent group-v843554. [ 1225.861986] env[61473]: DEBUG oslo.service.loopingcall [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1225.862184] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1225.862387] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29300c57-1590-4323-83ba-8a69702c59da {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.884263] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1225.884263] env[61473]: value = "task-4281633" [ 1225.884263] env[61473]: _type = "Task" [ 1225.884263] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.891507] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281633, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.126056] env[61473]: DEBUG nova.network.neutron [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Updated VIF entry in instance network info cache for port fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1226.126446] env[61473]: DEBUG nova.network.neutron [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Updating instance_info_cache with network_info: [{"id": "fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9", "address": "fa:16:3e:e8:40:33", "network": {"id": "6ee9e3ef-d656-4332-b329-bbc4e65d00ac", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-334877700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "88c2f68ebf2a40179a0f1c9547dfffc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb64aecb-5f", "ovs_interfaceid": "fb64aecb-5f78-4a7c-a9c1-badf0f2fedd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.137021] env[61473]: DEBUG oslo_concurrency.lockutils [req-786b3351-b7cc-42d4-80d6-329cd80bb3b1 req-4bf51644-2172-46c0-94da-b3d8bb3aa858 service nova] Releasing lock "refresh_cache-d79207a6-43e0-474a-9c61-8a71a86da7a0" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.394203] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281633, 'name': CreateVM_Task, 'duration_secs': 0.276205} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.394394] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1226.395071] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1226.395272] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.395591] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1226.396183] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd3b2dc4-7194-4f92-87af-f6093df1fcfb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.400754] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Waiting for the task: (returnval){ [ 1226.400754] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5240a8a2-6107-cc4e-76f7-88a4ae75aa7b" [ 1226.400754] env[61473]: _type = "Task" [ 1226.400754] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.408450] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5240a8a2-6107-cc4e-76f7-88a4ae75aa7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.911787] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.911787] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1226.911787] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1226.978679] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.978679] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances with incomplete migration {{(pid=61473) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11293}} [ 1227.976062] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.976062] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1227.976062] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.976524] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11255}} [ 1227.993985] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] There are 2 instances to clean {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11264}} [ 1227.994503] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 284671f0-2679-4344-86fa-4ea0f05f09bb] Instance has had 0 of 5 cleanup attempts {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11268}} [ 1228.034696] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 889ee8c2-615c-477e-8fc5-65241759dc5f] Instance has had 0 of 5 cleanup attempts {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11268}} [ 1229.063791] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.064118] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.076580] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.076877] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.077113] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.077512] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1229.078457] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88fe5f8-c6b2-4271-933c-cfad5b526876 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.087225] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1cb1d4-7286-40e0-92f0-a8ca8adfd293 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.101310] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832fa2d7-1ed0-49f3-b695-227bc1e91d3e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.107642] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c92e5f2-97de-41e2-a172-f76cd66235a4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.137042] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180606MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1229.137267] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.137430] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.294469] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d9395a72-994b-4baf-a296-2fc3d05a239c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.294644] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.294772] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.294896] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.295030] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.295155] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.295270] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.295385] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.295498] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.295611] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.306980] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.317585] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64a46ce2-e173-4d23-b5a0-32e28e0f068c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.327186] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2002ff6b-8648-4ec4-be86-da7a0ee886f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.337588] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bea0e473-ff2e-453c-802a-84648b6d6c51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.347848] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 189c4110-3e1c-424e-8102-5b894fb27963 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.357750] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4c5fd04e-7c5f-4499-a9da-852301ecd9a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.367791] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.381900] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.392203] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7e96360f-c62a-474e-a73e-9d7db6384987 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.401859] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8886a746-98cf-465d-b869-ebbe734ffa3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.412914] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.422743] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 271c4f52-31ac-43ea-9bfb-5adf561684c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.423051] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1229.423204] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1229.439237] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing inventories for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1229.454312] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating ProviderTree inventory for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1229.454550] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1229.467501] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing aggregate associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, aggregates: None {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1229.484594] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing trait associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1229.734046] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3690f8-a2fc-4a23-9f60-2c7c3c3a94ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.741980] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf93735b-f90e-4a42-860c-9953a4c34eb7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.772540] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51848a85-ad6f-4993-a1ae-82ebc1cd3cd1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.779574] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d5a2b8-9351-4c2c-96e8-49f2ab602315 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.792639] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.801220] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1229.814742] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1229.814928] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.677s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.717526] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.717898] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.966578] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.966384] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.966677] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1231.966718] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1231.989777] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.989977] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.990119] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.990310] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.990481] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.990630] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.990837] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.991017] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.991177] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.991307] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1231.991428] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1231.991917] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.986667] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.961363] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.064608] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "b3a2455d-eeb2-4681-94a7-69951a17b79f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.064967] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "b3a2455d-eeb2-4681-94a7-69951a17b79f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.309307] env[61473]: WARNING oslo_vmware.rw_handles [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1270.309307] env[61473]: ERROR oslo_vmware.rw_handles [ 1270.310052] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff 
tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1270.311980] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1270.312246] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Copying Virtual Disk [datastore2] vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/b949453c-1711-4f6a-9e0f-23eff160785f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1270.312543] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8ec882e-7c52-437b-a7dd-db959d1bb74b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.320795] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Waiting for the task: (returnval){ [ 1270.320795] env[61473]: value = "task-4281634" [ 1270.320795] env[61473]: _type = "Task" [ 1270.320795] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.329147] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Task: {'id': task-4281634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.831778] env[61473]: DEBUG oslo_vmware.exceptions [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1270.832087] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.832846] env[61473]: ERROR nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1270.832846] env[61473]: Faults: ['InvalidArgument'] [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Traceback (most recent call last): [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] yield resources [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self.driver.spawn(context, instance, image_meta, [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self._fetch_image_if_missing(context, vi) [ 1270.832846] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] image_cache(vi, tmp_image_ds_loc) [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] vm_util.copy_virtual_disk( [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] session._wait_for_task(vmdk_copy_task) [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] return self.wait_for_task(task_ref) [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] return evt.wait() [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] result = hub.switch() [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1270.833442] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] return self.greenlet.switch() [ 1270.833853] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1270.833853] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self.f(*self.args, **self.kw) [ 1270.833853] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1270.833853] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] raise exceptions.translate_fault(task_info.error) [ 1270.833853] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1270.833853] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Faults: ['InvalidArgument'] [ 1270.833853] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] [ 1270.833853] env[61473]: INFO nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Terminating instance [ 1270.834901] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.835127] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1270.835378] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60c16357-0a61-4523-b251-07bb9241ba52 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.837423] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1270.837582] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquired lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.837747] env[61473]: DEBUG nova.network.neutron [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1270.844276] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1270.844447] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1270.845154] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff0c0fd8-2b8c-4d4d-a9c2-3137a0241b13 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.852841] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Waiting for the task: (returnval){ [ 1270.852841] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]524cc232-d975-e4ec-f11a-19ade096a9be" [ 1270.852841] env[61473]: _type = "Task" [ 1270.852841] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.862374] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]524cc232-d975-e4ec-f11a-19ade096a9be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.866515] env[61473]: DEBUG nova.network.neutron [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1270.937560] env[61473]: DEBUG nova.network.neutron [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.947124] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Releasing lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.947549] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1270.947741] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1270.948802] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c1256c-9f93-4852-96b1-4ed225f19f39 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.956543] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1270.956756] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6a57de5-b04a-4997-bbfa-653c89f133ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.982621] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1270.982856] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1270.983052] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Deleting the datastore file [datastore2] d9395a72-994b-4baf-a296-2fc3d05a239c {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} 
[ 1270.983383] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6579ccd3-8190-4ef0-be8c-cd29be55a36d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.989949] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Waiting for the task: (returnval){ [ 1270.989949] env[61473]: value = "task-4281636" [ 1270.989949] env[61473]: _type = "Task" [ 1270.989949] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.997622] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Task: {'id': task-4281636, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.364195] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1271.364565] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Creating directory with path [datastore2] vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1271.364750] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11e17928-cb29-4163-9a37-93db8080a1a4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.376188] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Created directory with path [datastore2] vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1271.376392] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Fetch image to [datastore2] vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1271.376555] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1271.377312] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c5e308-7893-4f0c-a8ec-54d10c5b416a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.384831] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee3dc8c-b9e8-470e-ae77-f7e13531f710 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.394404] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6629ece-20d4-441d-85e0-1df5bef9722f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.424787] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c83101-702d-402f-8e65-292c7ff1fd0d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.430326] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dd7f1c82-b8e9-4e44-968c-40d0b2338975 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.451541] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1271.501524] env[61473]: DEBUG oslo_vmware.api [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Task: {'id': task-4281636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045109} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.501776] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1271.501958] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1271.502152] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1271.502326] env[61473]: INFO nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Took 0.55 seconds to destroy the instance on the hypervisor. [ 1271.502562] env[61473]: DEBUG oslo.service.loopingcall [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1271.502771] env[61473]: DEBUG nova.compute.manager [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1271.505218] env[61473]: DEBUG nova.compute.claims [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1271.505454] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.505739] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.509683] env[61473]: DEBUG oslo_vmware.rw_handles [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1271.571419] env[61473]: DEBUG oslo_vmware.rw_handles [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1271.571615] env[61473]: DEBUG oslo_vmware.rw_handles [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1271.846957] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a343493a-ecc6-4d62-8d10-c6bf6f253e6b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.854204] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1446cbe9-79bd-4606-b6b2-e3511d7f62f2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.884296] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a426ba7-cc60-4c74-8bcb-44696104fcd4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.891069] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188a2b71-45c0-4847-822a-88e8df3e64e5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.903734] env[61473]: DEBUG nova.compute.provider_tree [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.912568] env[61473]: DEBUG nova.scheduler.client.report [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1271.926386] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.421s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.926910] env[61473]: ERROR nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1271.926910] env[61473]: Faults: ['InvalidArgument'] [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Traceback (most recent call last): [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1271.926910] env[61473]: ERROR nova.compute.manager 
[instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self.driver.spawn(context, instance, image_meta, [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self._fetch_image_if_missing(context, vi) [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] image_cache(vi, tmp_image_ds_loc) [ 1271.926910] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] vm_util.copy_virtual_disk( [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] session._wait_for_task(vmdk_copy_task) [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] return self.wait_for_task(task_ref) [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] return evt.wait() [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] result = hub.switch() [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] return self.greenlet.switch() [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1271.927351] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] self.f(*self.args, **self.kw) [ 1271.927756] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1271.927756] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] raise exceptions.translate_fault(task_info.error) [ 1271.927756] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1271.927756] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Faults: ['InvalidArgument'] [ 1271.927756] env[61473]: ERROR nova.compute.manager [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] [ 1271.927756] env[61473]: DEBUG nova.compute.utils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1271.929331] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Build of instance d9395a72-994b-4baf-a296-2fc3d05a239c was re-scheduled: A specified parameter was not correct: fileType [ 1271.929331] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1271.929709] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1271.929932] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1271.930100] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquired lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.930264] env[61473]: DEBUG nova.network.neutron [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1271.956358] env[61473]: DEBUG nova.network.neutron [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1272.030754] env[61473]: DEBUG nova.network.neutron [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.045651] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Releasing lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.045884] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1272.046562] env[61473]: DEBUG nova.compute.manager [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Skipping network deallocation for instance since networking was not requested. {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1272.161673] env[61473]: INFO nova.scheduler.client.report [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Deleted allocations for instance d9395a72-994b-4baf-a296-2fc3d05a239c [ 1272.186284] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7578f7da-4b38-4d69-9d7b-0353e5c153ff tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "d9395a72-994b-4baf-a296-2fc3d05a239c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 578.377s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.187630] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "d9395a72-994b-4baf-a296-2fc3d05a239c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 379.712s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.187889] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "d9395a72-994b-4baf-a296-2fc3d05a239c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.188126] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock 
"d9395a72-994b-4baf-a296-2fc3d05a239c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.188304] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "d9395a72-994b-4baf-a296-2fc3d05a239c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.190504] env[61473]: INFO nova.compute.manager [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Terminating instance [ 1272.192319] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquiring lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1272.192533] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Acquired lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.193241] env[61473]: DEBUG nova.network.neutron [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1272.201091] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1272.221739] env[61473]: DEBUG nova.network.neutron [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1272.254895] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.255167] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.257019] env[61473]: INFO nova.compute.claims [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.286096] env[61473]: DEBUG nova.network.neutron [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.294932] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Releasing lock "refresh_cache-d9395a72-994b-4baf-a296-2fc3d05a239c" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.295436] env[61473]: DEBUG nova.compute.manager [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1272.295707] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1272.296315] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c2bcbcd-98ba-4b5a-b170-8022b6f630ff {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.310060] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbfddeb-eb6f-4cfc-94c3-649137054706 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.341305] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d9395a72-994b-4baf-a296-2fc3d05a239c could not be found. [ 1272.341513] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1272.341689] env[61473]: INFO nova.compute.manager [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1272.341926] env[61473]: DEBUG oslo.service.loopingcall [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1272.344271] env[61473]: DEBUG nova.compute.manager [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1272.344396] env[61473]: DEBUG nova.network.neutron [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1272.361488] env[61473]: DEBUG nova.network.neutron [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1272.368943] env[61473]: DEBUG nova.network.neutron [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.379647] env[61473]: INFO nova.compute.manager [-] [instance: d9395a72-994b-4baf-a296-2fc3d05a239c] Took 0.04 seconds to deallocate network for instance. 
[ 1272.467420] env[61473]: DEBUG oslo_concurrency.lockutils [None req-70d46ef2-2ced-4c49-9bf5-5e3dbc3a7e11 tempest-ServersAdmin275Test-1695820350 tempest-ServersAdmin275Test-1695820350-project-member] Lock "d9395a72-994b-4baf-a296-2fc3d05a239c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.280s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.575315] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52cecae-9aa0-4205-af21-6cb0077b1cad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.582726] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa38284e-f9cb-48fc-a047-1ae2f2fae357 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.611847] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d166caff-21d3-4976-bb1a-97c690426682 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.619028] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca357cc2-33ae-48de-97e9-3ec6de591044 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.632951] env[61473]: DEBUG nova.compute.provider_tree [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.645570] env[61473]: DEBUG nova.scheduler.client.report [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1272.661582] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.406s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.662986] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1272.700239] env[61473]: DEBUG nova.compute.utils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1272.702079] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1272.702305] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1272.721364] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1272.787622] env[61473]: DEBUG nova.policy [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e35b2dd42f4e4c759677d8b2211b3fcd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b3fd8db5dbd4f799bb77fb962e538d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1272.801909] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1272.832152] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=<?>,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T02:07:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1272.832152] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1272.832152] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1272.832515] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1272.832515] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1272.832515] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1272.832515] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1272.832515] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1272.832673] env[61473]: DEBUG 
nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1272.832673] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1272.832875] env[61473]: DEBUG nova.virt.hardware [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1272.833849] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a9ada5-814d-40a7-b359-48c49a3ab397 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.842466] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083f34cb-7cb8-4dc1-9cfc-2ca49de10750 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.137544] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Successfully created port: c073847f-a68b-410a-a3b6-afcc83586775 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1273.824799] env[61473]: DEBUG nova.compute.manager [req-e8d9fa82-595d-4660-9406-037a1b0ba92d req-d9f11ddf-8e79-4ca1-8a4c-a13e47b6ef94 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Received event network-vif-plugged-c073847f-a68b-410a-a3b6-afcc83586775 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1273.825068] env[61473]: DEBUG oslo_concurrency.lockutils [req-e8d9fa82-595d-4660-9406-037a1b0ba92d req-d9f11ddf-8e79-4ca1-8a4c-a13e47b6ef94 service nova] Acquiring lock "7886aeef-40ea-45e5-afa4-d04ca469649e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.827505] env[61473]: DEBUG oslo_concurrency.lockutils [req-e8d9fa82-595d-4660-9406-037a1b0ba92d req-d9f11ddf-8e79-4ca1-8a4c-a13e47b6ef94 service nova] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.827505] env[61473]: DEBUG oslo_concurrency.lockutils [req-e8d9fa82-595d-4660-9406-037a1b0ba92d req-d9f11ddf-8e79-4ca1-8a4c-a13e47b6ef94 service nova] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1273.827505] env[61473]: DEBUG 
nova.compute.manager [req-e8d9fa82-595d-4660-9406-037a1b0ba92d req-d9f11ddf-8e79-4ca1-8a4c-a13e47b6ef94 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] No waiting events found dispatching network-vif-plugged-c073847f-a68b-410a-a3b6-afcc83586775 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1273.827505] env[61473]: WARNING nova.compute.manager [req-e8d9fa82-595d-4660-9406-037a1b0ba92d req-d9f11ddf-8e79-4ca1-8a4c-a13e47b6ef94 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Received unexpected event network-vif-plugged-c073847f-a68b-410a-a3b6-afcc83586775 for instance with vm_state building and task_state spawning. [ 1273.916942] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Successfully updated port: c073847f-a68b-410a-a3b6-afcc83586775 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1273.933512] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "refresh_cache-7886aeef-40ea-45e5-afa4-d04ca469649e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.933659] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquired lock "refresh_cache-7886aeef-40ea-45e5-afa4-d04ca469649e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.933828] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1274.003120] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Instance cache missing network info. 
[ 1274.181936] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Updating instance_info_cache with network_info: [{"id": "c073847f-a68b-410a-a3b6-afcc83586775", "address": "fa:16:3e:e2:66:0c", "network": {"id": "421ec56f-018a-43be-a2a4-8c7d43f3986f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-382885451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b3fd8db5dbd4f799bb77fb962e538d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc073847f-a6", "ovs_interfaceid": "c073847f-a68b-410a-a3b6-afcc83586775", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1274.196868] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Releasing lock "refresh_cache-7886aeef-40ea-45e5-afa4-d04ca469649e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1274.197221] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Instance network_info: |[{"id": "c073847f-a68b-410a-a3b6-afcc83586775", "address": "fa:16:3e:e2:66:0c", "network": {"id": "421ec56f-018a-43be-a2a4-8c7d43f3986f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-382885451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b3fd8db5dbd4f799bb77fb962e538d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc073847f-a6", "ovs_interfaceid": "c073847f-a68b-410a-a3b6-afcc83586775", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}}
[ 1274.197741] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:66:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '750b5f9b-f78a-4650-9153-c5bb117e507c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c073847f-a68b-410a-a3b6-afcc83586775', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1274.205604] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Creating folder: Project (5b3fd8db5dbd4f799bb77fb962e538d1). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1274.206178] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20d67fdd-273b-448b-977a-cee24c91203d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1274.219832] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Created folder: Project (5b3fd8db5dbd4f799bb77fb962e538d1) in parent group-v843485.
[ 1274.219832] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Creating folder: Instances. Parent ref: group-v843557. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1274.219968] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9db155b3-5f24-4266-bba4-97d7d0d6b3db {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1274.230686] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Created folder: Instances in parent group-v843557.
[ 1274.230911] env[61473]: DEBUG oslo.service.loopingcall [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
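The Folder.CreateFolder invocations above go through oslo.vmware's session layer. A hedged sketch of issuing the same kind of call directly; the connection details are placeholders, and the folder name is taken from the log:

```python
# Sketch only: host and credentials are placeholders, not the values
# used in this deployment.
from oslo_vmware import api

session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# invoke_api() wraps the SOAP call and produces the
# 'Invoking Folder.CreateFolder with opID=...' request_handler line.
parent = session.vim.service_content.rootFolder  # the log uses group-v843485
folder = session.invoke_api(session.vim, 'CreateFolder', parent,
                            name='Instances')
```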
[ 1274.231103] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1274.231294] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-918ffc12-bd10-4232-b653-4b6b1f68e01c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1274.251220] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1274.251220] env[61473]: value = "task-4281639"
[ 1274.251220] env[61473]: _type = "Task"
[ 1274.251220] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1274.262014] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281639, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1274.760533] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281639, 'name': CreateVM_Task, 'duration_secs': 0.28692} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1274.760803] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1274.761553] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1274.761731] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1274.762054] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1274.762303] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15c0d422-02e1-47c5-9ef2-6ffa24b3b160 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1274.766492] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Waiting for the task: (returnval){
[ 1274.766492] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]520139b2-6433-df5b-883a-794413066527"
[ 1274.766492] env[61473]: _type = "Task"
[ 1274.766492] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
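CreateVM_Task returns a Task managed object, and the '(returnval){ ... } to complete' block plus the 'progress is 0%' and 'completed successfully' lines are oslo.vmware's task poller. A sketch of the call-and-wait pattern, reusing the session from the previous sketch; vm_folder, config_spec and resource_pool are placeholders:

```python
# Sketch of the task wait behind the CreateVM_Task records above.
task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                          config=config_spec, pool=resource_pool)
# wait_for_task() polls TaskInfo, emitting the '_poll_task ... progress'
# DEBUG lines, and raises if the task reports an error.
task_info = session.wait_for_task(task)
vm_ref = task_info.result  # the newly created VM's managed object ref
```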
[ 1274.775072] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]520139b2-6433-df5b-883a-794413066527, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1275.276345] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1275.276655] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1275.276796] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1275.850134] env[61473]: DEBUG nova.compute.manager [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Received event network-changed-c073847f-a68b-410a-a3b6-afcc83586775 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1275.850327] env[61473]: DEBUG nova.compute.manager [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Refreshing instance network info cache due to event network-changed-c073847f-a68b-410a-a3b6-afcc83586775. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}}
[ 1275.850571] env[61473]: DEBUG oslo_concurrency.lockutils [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] Acquiring lock "refresh_cache-7886aeef-40ea-45e5-afa4-d04ca469649e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1275.850719] env[61473]: DEBUG oslo_concurrency.lockutils [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] Acquired lock "refresh_cache-7886aeef-40ea-45e5-afa4-d04ca469649e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1275.850881] env[61473]: DEBUG nova.network.neutron [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Refreshing network info cache for port c073847f-a68b-410a-a3b6-afcc83586775 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1276.161690] env[61473]: DEBUG nova.network.neutron [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Updated VIF entry in instance network info cache for port c073847f-a68b-410a-a3b6-afcc83586775. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1276.162090] env[61473]: DEBUG nova.network.neutron [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Updating instance_info_cache with network_info: [{"id": "c073847f-a68b-410a-a3b6-afcc83586775", "address": "fa:16:3e:e2:66:0c", "network": {"id": "421ec56f-018a-43be-a2a4-8c7d43f3986f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-382885451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b3fd8db5dbd4f799bb77fb962e538d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc073847f-a6", "ovs_interfaceid": "c073847f-a68b-410a-a3b6-afcc83586775", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1276.171996] env[61473]: DEBUG oslo_concurrency.lockutils [req-4fa3832a-04cd-4b19-8914-5da085c9e9bb req-7ce970d1-32d7-4bb2-bfc8-216f9ca97b34 service nova] Releasing lock "refresh_cache-7886aeef-40ea-45e5-afa4-d04ca469649e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1278.362355] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
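Both the boot path and the event handler serialize their cache rebuilds on the same 'refresh_cache-<uuid>' lock, which is why the records above show a second acquire/release cycle for the same instance. A sketch of that pattern; the cache dict and fetch function are illustrative:

```python
from oslo_concurrency import lockutils

def refresh_instance_cache(cache, instance_uuid, fetch_nw_info):
    # Matches the 'Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>"'
    # records: only one writer rebuilds the network info cache at a time.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        cache[instance_uuid] = fetch_nw_info(instance_uuid)
        return cache[instance_uuid]
```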
[ 1279.826567] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1279.826567] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1282.145208] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_power_states {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1282.167385] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Getting list of instances from cluster (obj){
[ 1282.167385] env[61473]: value = "domain-c8"
[ 1282.167385] env[61473]: _type = "ClusterComputeResource"
[ 1282.167385] env[61473]: } {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 1282.169056] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0773f6-5cd4-48ab-9bd7-98caa1c4c694 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1282.186407] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Got total of 10 instances {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 1282.186856] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid e28da414-8fb8-4470-873a-a285925dd988 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190022] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 57eb1cd4-7c95-4173-800b-385bed2dbbbe {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190022] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 3a350a34-7728-493f-a737-7a6a3071363e {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190022] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190022] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 9031b0d9-4e07-4afa-a597-770b80df2511 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
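The 'Running periodic task ComputeManager._sync_power_states' record comes from oslo.service's periodic task machinery. A minimal, self-contained sketch of that machinery; the spacing value and the stub body are assumptions, not Nova's configuration:

```python
from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    # The decorator registers the method with the task runner; the
    # spacing (seconds) is an assumed value.
    @periodic_task.periodic_task(spacing=600)
    def _sync_power_states(self, context):
        # Nova compares driver power state to the DB record here; stubbed.
        pass

manager = Manager(cfg.CONF)
manager.run_periodic_tasks(context=None)  # logs 'Running periodic task ...'
```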
[ 1282.190022] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190022] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid a12b01db-28b4-477d-aef2-99304505d8c9 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190297] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190297] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid d79207a6-43e0-474a-9c61-8a71a86da7a0 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190297] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 7886aeef-40ea-45e5-afa4-d04ca469649e {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}}
[ 1282.190297] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "e28da414-8fb8-4470-873a-a285925dd988" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.190297] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.190453] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "3a350a34-7728-493f-a737-7a6a3071363e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.190453] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.190453] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "9031b0d9-4e07-4afa-a597-770b80df2511" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.190453] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.190579] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "a12b01db-28b4-477d-aef2-99304505d8c9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.190896] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.191253] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1282.191606] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "7886aeef-40ea-45e5-afa4-d04ca469649e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1283.667960] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "7886aeef-40ea-45e5-afa4-d04ca469649e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1287.966281] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1287.966571] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}}
[ 1288.965593] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1288.976918] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1288.977208] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1288.977322] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1288.977480] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1288.978703] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07bc0f1-dcdd-4b5d-aeab-4d6bc752693f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1288.987514] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f23d89-8655-4317-9737-282e7a85adfe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.002340] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4b7533-d3f1-4aff-9748-481a6f04b73a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.009684] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4a3a67-7923-4ca8-87ce-ea4eea014d2e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.040584] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180652MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1289.040846] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1289.041238] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1289.119348] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e28da414-8fb8-4470-873a-a285925dd988 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119348] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119348] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119348] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119501] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119501] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119501] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119501] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
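The used_ram/used_disk/used_vcpus figures in the 'Final resource view' record a little further on follow directly from the per-instance allocations listed above (each instance holds DISK_GB: 1, MEMORY_MB: 128, VCPU: 1) plus the 512MB the inventory reserves for the host. The arithmetic, using only values from the surrounding records:

```python
# Ten actively managed instances, each with the allocation shown above.
instances = 10
used_ram_mb = 512 + instances * 128   # reserved host RAM + guests -> 1792MB
used_disk_gb = instances * 1          # -> 10GB
used_vcpus = instances * 1            # -> 10
assert (used_ram_mb, used_disk_gb, used_vcpus) == (1792, 10, 10)
```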
[ 1289.119632] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.119632] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.132449] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bea0e473-ff2e-453c-802a-84648b6d6c51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.141799] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 189c4110-3e1c-424e-8102-5b894fb27963 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.153976] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4c5fd04e-7c5f-4499-a9da-852301ecd9a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.167886] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.180056] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.193760] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7e96360f-c62a-474e-a73e-9d7db6384987 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.205059] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8886a746-98cf-465d-b869-ebbe734ffa3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.215903] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.226609] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 271c4f52-31ac-43ea-9bfb-5adf561684c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.237391] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.249049] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.249049] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1289.249049] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1289.560998] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c69a32-ba97-4f9b-bdd4-7bfe171090c6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.568433] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9039bc0b-3dd0-4545-b68c-4ab0a46351e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.597842] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715cb54b-ed52-4e2c-8ea0-537534db0407 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.604955] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9b0b6f-7527-4d82-93d0-f4be643e377c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.618715] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1289.627252] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1289.642108] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1289.642281] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.601s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1290.643966] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
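The inventory record above shows what the tracker reports to placement for each resource class. Under placement's usual semantics, schedulable capacity is (total - reserved) * allocation_ratio; a sketch using the exact figures from the log:

```python
# Inventory data copied from the log record above (extra fields omitted).
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # e.g. VCPU -> 192.0 with the 4.0 overcommit ratio
```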
[ 1290.643966] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1290.967576] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1290.967810] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1291.966591] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1292.967596] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1292.967891] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 1292.968426] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}}
[ 1292.992366] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993028] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993028] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993028] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993028] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993028] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993276] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993276] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993421] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993550] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1292.993670] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}}
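The heal task above skipped every instance because all ten are still Building. A sketch of that filter; the vm_state field mirrors the log wording, but the exact check is an assumption, not Nova's code:

```python
def should_heal(instance):
    # Instances still building have no stable network info to heal yet.
    return instance.get('vm_state') != 'building'

instances = [{'uuid': 'e28da414-8fb8-4470-873a-a285925dd988',
              'vm_state': 'building'}]
to_heal = [i for i in instances if should_heal(i)]
# -> [], matching "Didn't find any instances for network info cache update."
```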
[ 1295.989470] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1300.934899] env[61473]: DEBUG oslo_concurrency.lockutils [None req-6c7638a3-7f3b-44d2-a28a-f554ac3eda0e tempest-ServerAddressesTestJSON-137553904 tempest-ServerAddressesTestJSON-137553904-project-member] Acquiring lock "eb01e325-e0f5-4eee-8e3c-22d7389589a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1300.935244] env[61473]: DEBUG oslo_concurrency.lockutils [None req-6c7638a3-7f3b-44d2-a28a-f554ac3eda0e tempest-ServerAddressesTestJSON-137553904 tempest-ServerAddressesTestJSON-137553904-project-member] Lock "eb01e325-e0f5-4eee-8e3c-22d7389589a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1315.474395] env[61473]: DEBUG oslo_concurrency.lockutils [None req-83256982-64c1-46a1-a406-b1f47bc9c096 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "83f00fd0-b61d-42c5-9232-a26da89f7b18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1315.474724] env[61473]: DEBUG oslo_concurrency.lockutils [None req-83256982-64c1-46a1-a406-b1f47bc9c096 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "83f00fd0-b61d-42c5-9232-a26da89f7b18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1316.866106] env[61473]: DEBUG oslo_concurrency.lockutils [None req-851d99cd-e08a-4055-a7a7-995b4b8ce503 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Acquiring lock "8d8a25ed-cec9-4736-be45-0d41b62028ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1316.866405] env[61473]: DEBUG oslo_concurrency.lockutils [None req-851d99cd-e08a-4055-a7a7-995b4b8ce503 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "8d8a25ed-cec9-4736-be45-0d41b62028ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1317.322855] env[61473]: WARNING oslo_vmware.rw_handles [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles response.begin()
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1317.322855] env[61473]: ERROR oslo_vmware.rw_handles
[ 1317.323385] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1317.325064] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1317.325323] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Copying Virtual Disk [datastore2] vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/5131a056-eb18-4d61-b084-451834e20e5c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1317.325603] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4126e6fc-d478-4288-9347-3c943dd11109 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1317.333851] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Waiting for the task: (returnval){
[ 1317.333851] env[61473]: value = "task-4281640"
[ 1317.333851] env[61473]: _type = "Task"
[ 1317.333851] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1317.342088] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Task: {'id': task-4281640, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1317.845523] env[61473]: DEBUG oslo_vmware.exceptions [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1317.961180] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1317.961180] env[61473]: ERROR nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1317.961180] env[61473]: Faults: ['InvalidArgument']
[ 1317.961180] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] Traceback (most recent call last):
[ 1317.961180] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources
[ 1317.961180] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] yield resources
[ 1317.961180] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1317.961180] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self.driver.spawn(context, instance, image_meta,
[ 1317.961180] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1317.961180] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self._fetch_image_if_missing(context, vi)
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] image_cache(vi, tmp_image_ds_loc)
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] vm_util.copy_virtual_disk(
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] session._wait_for_task(vmdk_copy_task)
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] return self.wait_for_task(task_ref)
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] return evt.wait()
[ 1317.962565] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] result = hub.switch()
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] return self.greenlet.switch()
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self.f(*self.args, **self.kw)
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] raise exceptions.translate_fault(task_info.error)
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] Faults: ['InvalidArgument']
[ 1317.963074] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988]
[ 1317.963074] env[61473]: INFO nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Terminating instance
[ 1317.963432] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.963432] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1317.963432] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c2f7354-bbfa-4c52-bea6-9f5fdc244cdc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.963432] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1317.963432] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1317.963634] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ea39e6-d4a6-4c7e-bfb5-036546298d3a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.963634] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1317.963634] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cab083d9-c281-4c6e-acd6-48d55140942b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.963634] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1317.963634] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1317.963813] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-831645a4-db0a-4365-af84-4ae294f7f18b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.963813] env[61473]: DEBUG oslo_vmware.api [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Waiting for the task: (returnval){ [ 1317.963813] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5278283b-9b16-3596-011f-4a82381bdf87" [ 1317.963813] env[61473]: _type = "Task" [ 1317.963813] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.963813] env[61473]: DEBUG oslo_vmware.api [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5278283b-9b16-3596-011f-4a82381bdf87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.963813] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1317.964073] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1317.964073] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Deleting the datastore file [datastore2] e28da414-8fb8-4470-873a-a285925dd988 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1317.964073] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6eb50638-52a9-4af7-b772-6a220b78f269 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.964073] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Waiting for the task: (returnval){ [ 1317.964073] env[61473]: value = "task-4281642" [ 1317.964073] env[61473]: _type = "Task" [ 1317.964073] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.964073] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Task: {'id': task-4281642, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.381049] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1318.381049] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Creating directory with path [datastore2] vmware_temp/16e7b1f3-9f13-4719-9412-377d77179958/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1318.381049] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcbac588-0090-429a-9255-3c25335de755 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.392591] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Created directory with path [datastore2] vmware_temp/16e7b1f3-9f13-4719-9412-377d77179958/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.392799] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Fetch image to [datastore2] vmware_temp/16e7b1f3-9f13-4719-9412-377d77179958/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1318.392998] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/16e7b1f3-9f13-4719-9412-377d77179958/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1318.393759] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934f0761-e43f-426f-842d-9fad010b3f86 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.400213] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0188603b-14f3-4457-a897-72184a835a9c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.409093] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839b5f16-b142-4700-9979-53c9922f80cf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.442008] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bdb625-9eb4-4c35-becf-439a7416c90e {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.448886] env[61473]: DEBUG oslo_vmware.api [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Task: {'id': task-4281642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070652} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.450302] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1318.450509] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1318.450694] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1318.450869] env[61473]: INFO nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Took 0.60 seconds to destroy the instance on the hypervisor. 
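The failure mode above is the standard oslo.vmware task protocol: CopyVirtualDisk_Task is submitted, _poll_task reports progress until the task leaves the running state, and a task that ends in error is turned into a Python exception (here VimFaultException with Faults: ['InvalidArgument']; the earlier "Fault InvalidArgument not matched" entry means no more specific exception class was registered for that fault name). A minimal sketch of that poll-and-raise loop, with plain dictionaries standing in for the real vSphere task-info objects — illustrative only, not the oslo.vmware implementation:

    import time

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        # Poll until the task leaves 'running'; mirrors the repeated
        # "_poll_task ... progress is 0%" lines above.
        while True:
            info = poll_task_info()
            if info["state"] == "running":
                time.sleep(interval)
                continue
            if info["state"] == "success":
                return info.get("result")
            # error state: translate the VIM fault into an exception, which
            # is what produces "Faults: ['InvalidArgument']" in the log
            raise VimFaultException(info["faults"], info["message"])

    # hypothetical polling sequence mirroring task-4281640
    states = iter([
        {"state": "running"},
        {"state": "error", "faults": ["InvalidArgument"],
         "message": "A specified parameter was not correct: fileType"},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc, exc.fault_list)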
[ 1318.452937] env[61473]: DEBUG nova.compute.claims [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1318.453124] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.453339] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.456635] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-82b8c1de-4bbd-42d9-8acb-f8d5eacf9b0a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.541011] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1318.714768] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.717326] env[61473]: ERROR nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. 
[ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = getattr(controller, method)(*args, **kwargs) [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._get(image_id) [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1318.717326] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] resp, body = self.http_client.get(url, headers=header) [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.request(url, 'GET', **kwargs) [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._handle_response(resp) [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exc.from_response(resp, resp.content) [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During handling of the above exception, another exception occurred: [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1318.717828] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] yield resources [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self.driver.spawn(context, instance, image_meta, [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._fetch_image_if_missing(context, vi) [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image_fetch(context, vi, tmp_image_ds_loc) [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] images.fetch_image( [ 1318.718256] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] metadata = IMAGE_API.get(context, image_ref) [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return session.show(context, image_id, [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] _reraise_translated_image_exception(image_id) [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise new_exc.with_traceback(exc_trace) [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = getattr(controller, method)(*args, **kwargs) [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1318.718705] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._get(image_id) [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] resp, body = self.http_client.get(url, headers=header) [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.request(url, 'GET', **kwargs) [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._handle_response(resp) [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exc.from_response(resp, resp.content) [ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. 
[ 1318.719122] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1318.719506] env[61473]: INFO nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Terminating instance [ 1318.719813] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.720077] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1318.720763] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1318.720983] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1318.723692] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf6b899f-579b-4b68-8f20-4d24d6384491 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.726533] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a237a9-e99d-45fe-8a4e-b162795ebdd0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.735105] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1318.735383] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc3cc817-b3ce-459f-93a7-4359edd39bb7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.738008] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.738215] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 
tempest-MultipleCreateTestJSON-725599120-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1318.739289] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f78ee575-a5b6-4637-912a-3338fe1c93e3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.747095] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1318.747095] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52dc5249-f427-0733-470d-73a41af9680a" [ 1318.747095] env[61473]: _type = "Task" [ 1318.747095] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.760170] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52dc5249-f427-0733-470d-73a41af9680a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.804828] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1318.805074] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1318.805260] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Deleting the datastore file [datastore2] 57eb1cd4-7c95-4173-800b-385bed2dbbbe {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1318.805514] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b100ca42-fa7d-4448-8ebb-d0038d0059c8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.812386] env[61473]: DEBUG oslo_vmware.api [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Waiting for the task: (returnval){ [ 1318.812386] env[61473]: value = "task-4281644" [ 1318.812386] env[61473]: _type = "Task" [ 1318.812386] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.820188] env[61473]: DEBUG oslo_vmware.api [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Task: {'id': task-4281644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.855258] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e91211-23ed-4abb-891f-c89b46120a67 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.862205] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2296cc48-6bf8-4f24-9a66-9779cce2834e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.891616] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7522ecf-aac4-4096-8117-9dd508f0a5fa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.898654] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7881246-0cab-4966-aba2-4250bc46bacc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.911271] env[61473]: DEBUG nova.compute.provider_tree [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.920562] env[61473]: DEBUG nova.scheduler.client.report [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1318.934667] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.481s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.935206] env[61473]: ERROR nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1318.935206] env[61473]: Faults: 
['InvalidArgument'] [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] Traceback (most recent call last): [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self.driver.spawn(context, instance, image_meta, [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self._fetch_image_if_missing(context, vi) [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] image_cache(vi, tmp_image_ds_loc) [ 1318.935206] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] vm_util.copy_virtual_disk( [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] session._wait_for_task(vmdk_copy_task) [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] return self.wait_for_task(task_ref) [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] return evt.wait() [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] result = hub.switch() [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] return self.greenlet.switch() [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: 
e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1318.935618] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] self.f(*self.args, **self.kw) [ 1318.936055] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1318.936055] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] raise exceptions.translate_fault(task_info.error) [ 1318.936055] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1318.936055] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] Faults: ['InvalidArgument'] [ 1318.936055] env[61473]: ERROR nova.compute.manager [instance: e28da414-8fb8-4470-873a-a285925dd988] [ 1318.936055] env[61473]: DEBUG nova.compute.utils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1318.937315] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Build of instance e28da414-8fb8-4470-873a-a285925dd988 was re-scheduled: A specified parameter was not correct: fileType [ 1318.937315] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1318.937671] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1318.937842] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1318.938019] env[61473]: DEBUG nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1318.938186] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1319.257263] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1319.257562] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating directory with path [datastore2] vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1319.257804] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80ad562d-7fac-450d-b49d-41e37fc0b33e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.269639] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created directory with path [datastore2] vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1319.269840] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Fetch image to [datastore2] vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1319.270021] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1319.270806] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec11431-67fb-4e41-88d8-f07cb3f830e5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
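The interleaved 'Acquired lock "[datastore2] ... .vmdk"' / "Preparing fetch location" / "Fetch image to ... tmp-sparse.vmdk" entries show how concurrent spawns serialize on the image cache: each request takes a lock named after the cached VMDK path, re-checks whether the cache entry exists, and only then stages a download under a vmware_temp/<uuid>/ directory. Nova does this with oslo_concurrency.lockutils; the sketch below uses a stdlib lock table instead, and exists/fetch/promote are hypothetical stubs for the datastore operations:

    import threading
    from collections import defaultdict

    _locks = defaultdict(threading.Lock)   # one lock per cached-VMDK path

    def fetch_image_if_missing(cache_path, exists, fetch, promote):
        # Serialize all spawns that need the same image, as the
        # 'Acquired lock "[datastore2] devstack-image-cache_base/..."'
        # entries above do.
        with _locks[cache_path]:
            if exists(cache_path):         # another request already cached it
                return cache_path
            tmp = cache_path + ".tmp-sparse.vmdk"   # staging location
            fetch(tmp)                     # "Downloading image file data ..."
            promote(tmp, cache_path)       # "Copying Virtual Disk ... to ..."
            return cache_path

    # toy usage with an in-memory "datastore"
    cached = set()
    fetch_image_if_missing("aa35b7fc.vmdk", cached.__contains__,
                           lambda tmp: None, lambda tmp, dst: cached.add(dst))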
[ 1319.277960] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6727b4c6-6b71-4e37-ae3d-2d1aa03e2840 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.288696] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec1bc4c-7900-43da-8b80-298838611cb4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.328886] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3dec7c3-5c84-4879-84db-23bf01cfb2bb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.337765] env[61473]: DEBUG oslo_vmware.api [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Task: {'id': task-4281644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063462} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.339261] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1319.339528] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1319.339820] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1319.340249] env[61473]: INFO nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1319.343233] env[61473]: DEBUG nova.compute.claims [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1319.343339] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.343493] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.346737] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c1813d3d-93e0-4d39-86ed-81091aa3284c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.369857] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1319.426140] env[61473]: DEBUG nova.network.neutron [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.437071] env[61473]: DEBUG oslo_vmware.rw_handles [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1319.495023] env[61473]: INFO nova.compute.manager [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Took 0.56 seconds to deallocate network for instance. [ 1319.500826] env[61473]: DEBUG oslo_vmware.rw_handles [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Completed reading data from the image iterator. 
{{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1319.500826] env[61473]: DEBUG oslo_vmware.rw_handles [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1319.606190] env[61473]: INFO nova.scheduler.client.report [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Deleted allocations for instance e28da414-8fb8-4470-873a-a285925dd988 [ 1319.626509] env[61473]: DEBUG oslo_concurrency.lockutils [None req-918d81ac-04f7-4b61-aa81-ea9469d2c13f tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "e28da414-8fb8-4470-873a-a285925dd988" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.736s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.627752] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "e28da414-8fb8-4470-873a-a285925dd988" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.620s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.628126] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Acquiring lock "e28da414-8fb8-4470-873a-a285925dd988-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.628357] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "e28da414-8fb8-4470-873a-a285925dd988-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.628540] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "e28da414-8fb8-4470-873a-a285925dd988-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.630523] env[61473]: INFO nova.compute.manager [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Terminating instance [ 
1319.632261] env[61473]: DEBUG nova.compute.manager [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1319.632457] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1319.632940] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b3b53a6-7a00-446b-972f-e3a0b98d440a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.639344] env[61473]: DEBUG nova.compute.manager [None req-904fbe08-37d1-4095-a918-4c069ca2fc51 tempest-ServerExternalEventsTest-1763021243 tempest-ServerExternalEventsTest-1763021243-project-member] [instance: 64a46ce2-e173-4d23-b5a0-32e28e0f068c] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1319.646349] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e0f62c-fc31-461c-a1f8-9617666dfb23 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.664875] env[61473]: DEBUG nova.compute.manager [None req-904fbe08-37d1-4095-a918-4c069ca2fc51 tempest-ServerExternalEventsTest-1763021243 tempest-ServerExternalEventsTest-1763021243-project-member] [instance: 64a46ce2-e173-4d23-b5a0-32e28e0f068c] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1319.675500] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e28da414-8fb8-4470-873a-a285925dd988 could not be found. [ 1319.675700] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1319.675880] env[61473]: INFO nova.compute.manager [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] [instance: e28da414-8fb8-4470-873a-a285925dd988] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1319.676136] env[61473]: DEBUG oslo.service.loopingcall [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1319.678493] env[61473]: DEBUG nova.compute.manager [-] [instance: e28da414-8fb8-4470-873a-a285925dd988] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1319.678596] env[61473]: DEBUG nova.network.neutron [-] [instance: e28da414-8fb8-4470-873a-a285925dd988] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1319.692299] env[61473]: DEBUG oslo_concurrency.lockutils [None req-904fbe08-37d1-4095-a918-4c069ca2fc51 tempest-ServerExternalEventsTest-1763021243 tempest-ServerExternalEventsTest-1763021243-project-member] Lock "64a46ce2-e173-4d23-b5a0-32e28e0f068c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.799s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.701693] env[61473]: DEBUG nova.compute.manager [None req-df6b5c31-e662-4cf6-a209-0ebc96fbdee4 tempest-ServersListShow296Test-962093324 tempest-ServersListShow296Test-962093324-project-member] [instance: 2002ff6b-8648-4ec4-be86-da7a0ee886f0] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1319.707412] env[61473]: DEBUG nova.network.neutron [-] [instance: e28da414-8fb8-4470-873a-a285925dd988] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.716507] env[61473]: INFO nova.compute.manager [-] [instance: e28da414-8fb8-4470-873a-a285925dd988] Took 0.04 seconds to deallocate network for instance. [ 1319.742709] env[61473]: DEBUG nova.compute.manager [None req-df6b5c31-e662-4cf6-a209-0ebc96fbdee4 tempest-ServersListShow296Test-962093324 tempest-ServersListShow296Test-962093324-project-member] [instance: 2002ff6b-8648-4ec4-be86-da7a0ee886f0] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1319.776644] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df6b5c31-e662-4cf6-a209-0ebc96fbdee4 tempest-ServersListShow296Test-962093324 tempest-ServersListShow296Test-962093324-project-member] Lock "2002ff6b-8648-4ec4-be86-da7a0ee886f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.348s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.787751] env[61473]: DEBUG nova.compute.manager [None req-017474e5-b72d-4ba8-8ab2-954ed1627418 tempest-ServersTestManualDisk-71207237 tempest-ServersTestManualDisk-71207237-project-member] [instance: bea0e473-ff2e-453c-802a-84648b6d6c51] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1319.792436] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98df2c2-e45b-4abc-bae0-61605582c492 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.799885] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2940d5e2-4454-4fc4-b501-6c96c7c826bd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.831823] env[61473]: DEBUG nova.compute.manager [None req-017474e5-b72d-4ba8-8ab2-954ed1627418 tempest-ServersTestManualDisk-71207237 tempest-ServersTestManualDisk-71207237-project-member] [instance: bea0e473-ff2e-453c-802a-84648b6d6c51] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1319.832595] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f71b9bd-5148-400e-924d-957609afb25e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.838370] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e46fedcf-fb44-41bd-8646-8506965d035b tempest-ImagesOneServerTestJSON-1497840417 tempest-ImagesOneServerTestJSON-1497840417-project-member] Lock "e28da414-8fb8-4470-873a-a285925dd988" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.211s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.839281] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "e28da414-8fb8-4470-873a-a285925dd988" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 37.650s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.839508] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e28da414-8fb8-4470-873a-a285925dd988] During sync_power_state the instance has a pending task (deleting). Skip. 
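A note on the lock tracing above: the "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" DEBUG triples are emitted by oslo.concurrency itself (the "inner" frames at lockutils.py:402/407/421), where "waited" measures time blocked on acquire and "held" measures time spent inside the critical section. A minimal sketch of the calling pattern, using only the public oslo.concurrency API; the function names and bodies below are illustrative, not Nova's actual code:

# Sketch: the serialization pattern behind the lock DEBUG lines above.
# oslo.concurrency logs "Acquiring/acquired/released" with the waited/held
# timings on its own; callers only wrap the critical section.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_resource_tracker():
    # Runs with the in-process "compute_resources" lock held, so claim and
    # abort bookkeeping for the node is serialized. Illustrative body only.
    pass

# Equivalent context-manager form, e.g. for per-instance locks such as
# "e28da414-...-events" seen above:
def clear_events(instance_uuid):
    with lockutils.lock(instance_uuid + '-events'):
        pass  # critical section

Long "waited" values in the log (e.g. 428.620s on the terminate lock) therefore indicate how long a request queued behind the holder, not how long the work itself took.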
[ 1319.839667] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "e28da414-8fb8-4470-873a-a285925dd988" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.843140] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e815fdec-930f-4410-91df-8ac47836eef7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.859303] env[61473]: DEBUG nova.compute.provider_tree [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.861272] env[61473]: DEBUG oslo_concurrency.lockutils [None req-017474e5-b72d-4ba8-8ab2-954ed1627418 tempest-ServersTestManualDisk-71207237 tempest-ServersTestManualDisk-71207237-project-member] Lock "bea0e473-ff2e-453c-802a-84648b6d6c51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.563s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.868017] env[61473]: DEBUG nova.scheduler.client.report [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1319.871430] env[61473]: DEBUG nova.compute.manager [None req-178d1e0c-ac54-4d6b-b8b9-1171daa5b7be tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] [instance: 189c4110-3e1c-424e-8102-5b894fb27963] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1319.880225] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.537s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.880976] env[61473]: ERROR nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. 
[ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = getattr(controller, method)(*args, **kwargs) [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._get(image_id) [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1319.880976] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] resp, body = self.http_client.get(url, headers=header) [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.request(url, 'GET', **kwargs) [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._handle_response(resp) [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exc.from_response(resp, resp.content) [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During handling of the above exception, another exception occurred: [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1319.881350] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self.driver.spawn(context, instance, image_meta, [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._fetch_image_if_missing(context, vi) [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image_fetch(context, vi, tmp_image_ds_loc) [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] images.fetch_image( [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] metadata = IMAGE_API.get(context, image_ref) [ 1319.881701] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return session.show(context, image_id, [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] _reraise_translated_image_exception(image_id) [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise new_exc.with_traceback(exc_trace) [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 
57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = getattr(controller, method)(*args, **kwargs) [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._get(image_id) [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1319.882102] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] resp, body = self.http_client.get(url, headers=header) [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.request(url, 'GET', **kwargs) [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._handle_response(resp) [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exc.from_response(resp, resp.content) [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. [ 1319.882474] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1319.882743] env[61473]: DEBUG nova.compute.utils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. 
{{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1319.883516] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Build of instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe was re-scheduled: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1319.883999] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1319.884190] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1319.884346] env[61473]: DEBUG nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1319.884506] env[61473]: DEBUG nova.network.neutron [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1319.900485] env[61473]: DEBUG nova.compute.manager [None req-178d1e0c-ac54-4d6b-b8b9-1171daa5b7be tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] [instance: 189c4110-3e1c-424e-8102-5b894fb27963] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1319.922298] env[61473]: DEBUG oslo_concurrency.lockutils [None req-178d1e0c-ac54-4d6b-b8b9-1171daa5b7be tempest-ServersTestMultiNic-987354845 tempest-ServersTestMultiNic-987354845-project-member] Lock "189c4110-3e1c-424e-8102-5b894fb27963" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.280s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.946963] env[61473]: DEBUG nova.compute.manager [None req-86cd00ee-efdf-4a05-ad79-4bdad30b8303 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 4c5fd04e-7c5f-4499-a9da-852301ecd9a4] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1319.976270] env[61473]: DEBUG nova.compute.manager [None req-86cd00ee-efdf-4a05-ad79-4bdad30b8303 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 4c5fd04e-7c5f-4499-a9da-852301ecd9a4] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1319.997731] env[61473]: DEBUG neutronclient.v2_0.client [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61473) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1320.000114] env[61473]: ERROR nova.compute.manager [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = getattr(controller, method)(*args, **kwargs) [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._get(image_id) [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1320.000114] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] resp, body = self.http_client.get(url, headers=header) [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.request(url, 'GET', **kwargs) [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._handle_response(resp) [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exc.from_response(resp, resp.content) [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During handling of the above exception, another exception occurred: [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.000719] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self.driver.spawn(context, instance, image_meta, [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._fetch_image_if_missing(context, vi) [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image_fetch(context, vi, tmp_image_ds_loc) [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] images.fetch_image( [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] metadata = IMAGE_API.get(context, image_ref) [ 1320.001868] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return session.show(context, image_id, [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 
57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] _reraise_translated_image_exception(image_id) [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise new_exc.with_traceback(exc_trace) [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = getattr(controller, method)(*args, **kwargs) [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._get(image_id) [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1320.002490] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] resp, body = self.http_client.get(url, headers=header) [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.request(url, 'GET', **kwargs) [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self._handle_response(resp) [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exc.from_response(resp, resp.content) [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] nova.exception.ImageNotAuthorized: Not authorized for image 
aa35b7fc-44b5-479c-b6c8-60930c581f0d. [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During handling of the above exception, another exception occurred: [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.003068] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2452, in _do_build_and_run_instance [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._build_and_run_instance(context, instance, image, [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2744, in _build_and_run_instance [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exception.RescheduledException( [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] nova.exception.RescheduledException: Build of instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe was re-scheduled: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During handling of the above exception, another exception occurred: [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1320.003922] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] exception_handler_v20(status_code, error_body) [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise client_exc(message=error_message, [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Neutron server returns request_ids: ['req-f9bf3bd3-6986-4f85-8e79-9d367b691862'] [ 1320.004559] env[61473]: ERROR 
nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During handling of the above exception, another exception occurred: [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 3041, in _cleanup_allocated_networks [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._deallocate_network(context, instance, requested_networks) [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self.network_api.deallocate_for_instance( [ 1320.004559] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] data = neutron.list_ports(**search_opts) [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.list('ports', self.ports_path, retrieve_all, [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] for r in self._pagination(collection, path, **params): [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] res = self.get(path, params=params) [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.005131] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.005491] env[61473]: ERROR 
nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.retry_request("GET", action, body=body, [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.do_request(method, action, body=body, [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._handle_fault_response(status_code, replybody, resp) [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exception.Unauthorized() [ 1320.005491] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] nova.exception.Unauthorized: Not authorized. [ 1320.005853] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.005853] env[61473]: DEBUG oslo_concurrency.lockutils [None req-86cd00ee-efdf-4a05-ad79-4bdad30b8303 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "4c5fd04e-7c5f-4499-a9da-852301ecd9a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.930s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.014474] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1320.070712] env[61473]: INFO nova.scheduler.client.report [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Deleted allocations for instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe [ 1320.077185] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.077459] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.078832] env[61473]: INFO nova.compute.claims [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1320.086169] env[61473]: DEBUG oslo_concurrency.lockutils [None req-937f9d48-de5c-4a33-b97e-17df13695225 tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 623.726s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.087128] env[61473]: DEBUG oslo_concurrency.lockutils [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 426.521s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.087347] env[61473]: DEBUG oslo_concurrency.lockutils [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Acquiring lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.087550] env[61473]: DEBUG oslo_concurrency.lockutils [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.087715] env[61473]: DEBUG oslo_concurrency.lockutils [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 
tempest-ServerDiagnosticsTest-791504828-project-member] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.089529] env[61473]: INFO nova.compute.manager [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Terminating instance [ 1320.091108] env[61473]: DEBUG nova.compute.manager [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1320.091304] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1320.091899] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-627e65d7-73f3-4ba6-9ced-080356aca202 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.094378] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1320.103834] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e500a57e-68cc-4187-8bb9-f32138fe8c44 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.131333] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 57eb1cd4-7c95-4173-800b-385bed2dbbbe could not be found. [ 1320.131333] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1320.131333] env[61473]: INFO nova.compute.manager [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Took 0.04 seconds to destroy the instance on the hypervisor. 
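The WARNING/DEBUG pair above ("Instance does not exist on backend" followed immediately by "Instance destroyed") reflects a tolerate-missing pattern in the destroy path: if the backend VM lookup raises InstanceNotFound, teardown logs a warning and proceeds so that network deallocation and allocation cleanup still run. A hedged sketch of that shape, with local stand-in names rather than Nova's exact implementation:

# Sketch of the tolerate-missing destroy pattern visible above.
# Names are local stand-ins for the Nova equivalents.
import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(lookup_vm_ref, instance_uuid):
    try:
        vm_ref = lookup_vm_ref(instance_uuid)  # e.g. SearchIndex.FindAllByUuid
    except InstanceNotFound:
        # VM already gone on the hypervisor: warn and return normally so the
        # caller still deallocates networking and releases allocations,
        # matching the WARNING + "Instance destroyed" pair in the log.
        LOG.warning("Instance %s does not exist on backend", instance_uuid)
        return
    # ... unregister and delete vm_ref here ...
    LOG.debug("Instance %s destroyed", instance_uuid)

This is why the terminate completes in ~0.04 seconds here: there was never a VM to tear down, only bookkeeping to unwind.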
[ 1320.131521] env[61473]: DEBUG oslo.service.loopingcall [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1320.131804] env[61473]: DEBUG nova.compute.manager [-] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1320.131903] env[61473]: DEBUG nova.network.neutron [-] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1320.176855] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.265918] env[61473]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61473) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1320.265918] env[61473]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1320.265918] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1320.265918] env[61473]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-4eead6e4-045c-49cb-a5b4-21b0aae8a757'] [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1320.266578] env[61473]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1320.267176] env[61473]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1320.267176] env[61473]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1320.267665] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1320.267665] env[61473]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1320.267665] env[61473]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1320.267665] env[61473]: ERROR oslo.service.loopingcall [ 1320.267665] env[61473]: ERROR nova.compute.manager [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1320.301139] env[61473]: ERROR nova.compute.manager [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
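Editor's note: the frames at nova/network/neutron.py lines 196 and 212 in the traceback above show the shape of the error translation: Neutron client calls are proxied through a wrapper, and an Unauthorized raised by a client built with admin credentials is converted into NeutronAdminCredentialConfigurationInvalid rather than retried. A minimal sketch of that pattern, with stand-in exception classes so it runs without nova or neutronclient installed:

import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""

def translate_neutron_errors(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            # An admin-credentialed client should never see 401; treat it
            # as a deployment configuration problem instead of retrying.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper

@translate_neutron_errors
def list_ports(**search_opts):
    # Stand-in body simulating the 401 seen in this log.
    raise Unauthorized("401: The request you have made requires authentication.")

Converting the 401 into a distinct configuration-error type lets callers fail fast, which is why the deallocation above aborts on the first attempt instead of looping against Neutron.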
[ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] exception_handler_v20(status_code, error_body) [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise client_exc(message=error_message, [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Neutron server returns request_ids: ['req-4eead6e4-045c-49cb-a5b4-21b0aae8a757'] [ 1320.301139] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During handling of the above exception, another exception occurred: [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Traceback (most recent call last): [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._delete_instance(context, instance, bdms) [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._shutdown_instance(context, instance, bdms) [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._try_deallocate_network(context, instance, requested_networks) [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] with excutils.save_and_reraise_exception(): [ 1320.301539] env[61473]: ERROR 
nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1320.301539] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self.force_reraise() [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise self.value [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] _deallocate_network_with_retries() [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return evt.wait() [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = hub.switch() [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.greenlet.switch() [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1320.301903] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = func(*self.args, **self.kw) [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] result = f(*args, **kwargs) [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._deallocate_network( [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self.network_api.deallocate_for_instance( [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 
57eb1cd4-7c95-4173-800b-385bed2dbbbe] data = neutron.list_ports(**search_opts) [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.list('ports', self.ports_path, retrieve_all, [ 1320.302279] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] for r in self._pagination(collection, path, **params): [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] res = self.get(path, params=params) [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.retry_request("GET", action, body=body, [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1320.302704] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] return self.do_request(method, action, body=body, [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] ret = obj(*args, **kwargs) [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] self._handle_fault_response(status_code, replybody, resp) [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1320.303123] env[61473]: ERROR nova.compute.manager [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] [ 1320.330434] env[61473]: DEBUG oslo_concurrency.lockutils [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.243s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.332441] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 38.143s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.332703] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] During sync_power_state the instance has a pending task (deleting). Skip. [ 1320.332955] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "57eb1cd4-7c95-4173-800b-385bed2dbbbe" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.383301] env[61473]: INFO nova.compute.manager [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] [instance: 57eb1cd4-7c95-4173-800b-385bed2dbbbe] Successfully reverted task state from None on failure for instance. [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server [None req-318cb89f-637a-4190-89bd-ae6a289e48cd tempest-ServerDiagnosticsTest-791504828 tempest-ServerDiagnosticsTest-791504828-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-4eead6e4-045c-49cb-a5b4-21b0aae8a757'] [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1320.387996] env[61473]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1320.388523] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3349, in terminate_instance [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in do_terminate_instance [ 1320.389037] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1320.389551] env[61473]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1320.390223] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.390223] env[61473]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1320.390731] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1320.392147] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1320.392147] env[61473]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1320.392147] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1320.392147] env[61473]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1320.392147] env[61473]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
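Editor's note: the lock records a few entries back (the instance lock released by do_terminate_instance after 0.243s, then acquired by the power-state sync that had waited 38.143s and saw the pending delete) are oslo.concurrency's named-lock serialization: any tasks that name the same lock run one at a time. A sketch of the pattern; the function bodies are placeholders and the lock name is simply the instance UUID from this log.

# Hedged sketch of per-instance lock serialization, as in the records above.
from oslo_concurrency import lockutils

INSTANCE_UUID = '57eb1cd4-7c95-4173-800b-385bed2dbbbe'

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    ...  # delete path holds the lock while shutting the instance down

@lockutils.synchronized(INSTANCE_UUID)
def query_driver_power_state_and_sync():
    ...  # periodic sync blocks here until the delete path releases the lock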
[ 1320.392147] env[61473]: ERROR oslo_messaging.rpc.server [ 1320.412507] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b02f67-303b-403a-84ef-70c6007a8e96 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.419487] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5600c45-01a9-4ee7-9f84-744b7fa7de39 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.449803] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705cae3a-82d7-46c6-b0c0-f8a6444d34a2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.457583] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ab5c37-f81b-4bdd-9646-980b9a91501e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.470654] env[61473]: DEBUG nova.compute.provider_tree [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1320.479997] env[61473]: DEBUG nova.scheduler.client.report [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1320.496021] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.418s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.496541] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1320.498941] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.322s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.500324] env[61473]: INFO nova.compute.claims [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1320.537213] env[61473]: DEBUG nova.compute.utils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1320.538747] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1320.538931] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1320.551561] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1320.653397] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1320.679933] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1320.680204] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1320.680366] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1320.680535] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1320.680698] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1320.680817] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1320.681042] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1320.681209] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1320.681376] env[61473]: DEBUG nova.virt.hardware [None 
req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1320.681616] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1320.681826] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1320.682702] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a276d38a-cab9-4b43-a368-af5b8a7df98c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.694232] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a1635b-3f52-45be-bdaa-04d51177f83a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.815017] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411a43a9-7e59-41f3-898c-e97f80f83bd6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.818718] env[61473]: DEBUG nova.policy [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aed41fc16652481cb2b544cd792db1ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e0a2e7f01674740942b8185f4261d86', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1320.824894] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76db494-0c20-4905-9778-5146869cc433 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.855455] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e6aa23-6e67-497b-9214-5fda87d744b9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.863162] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce10988f-36d7-47c7-bf0c-abef25a249ce {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.876900] env[61473]: DEBUG nova.compute.provider_tree [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] 
Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1320.887862] env[61473]: DEBUG nova.scheduler.client.report [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1320.909983] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.410s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.910107] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1320.953195] env[61473]: DEBUG nova.compute.utils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1320.954536] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1320.954711] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1320.963427] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1321.028393] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1321.040457] env[61473]: DEBUG nova.policy [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aed41fc16652481cb2b544cd792db1ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e0a2e7f01674740942b8185f4261d86', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1321.057362] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1321.057662] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1321.057860] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1321.058093] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1321.058281] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1321.058463] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1321.058700] env[61473]: DEBUG nova.virt.hardware [None 
req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1321.058891] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1321.059106] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1321.059310] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1321.059533] env[61473]: DEBUG nova.virt.hardware [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1321.060416] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230246a7-a8e4-4379-ab59-7653c4fcb605 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.068788] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a25abc-0668-49b4-b7a6-354c3adc6483 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.435246] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Successfully created port: 4cd955eb-1ecb-42b8-81a2-11f9a848af92 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1321.752310] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Successfully created port: 790a47cd-41e0-42eb-a42b-dcb3996ec7d6 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1322.463816] env[61473]: DEBUG nova.compute.manager [req-12f336aa-3104-4e59-9412-7df9932e283d req-d979b5eb-c376-4c4b-b028-645b7ae906c8 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Received event network-vif-plugged-4cd955eb-1ecb-42b8-81a2-11f9a848af92 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1322.464113] env[61473]: DEBUG oslo_concurrency.lockutils [req-12f336aa-3104-4e59-9412-7df9932e283d req-d979b5eb-c376-4c4b-b028-645b7ae906c8 service nova] Acquiring 
lock "0367d64d-76f3-4483-bc17-77cd900569ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.464357] env[61473]: DEBUG oslo_concurrency.lockutils [req-12f336aa-3104-4e59-9412-7df9932e283d req-d979b5eb-c376-4c4b-b028-645b7ae906c8 service nova] Lock "0367d64d-76f3-4483-bc17-77cd900569ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.464583] env[61473]: DEBUG oslo_concurrency.lockutils [req-12f336aa-3104-4e59-9412-7df9932e283d req-d979b5eb-c376-4c4b-b028-645b7ae906c8 service nova] Lock "0367d64d-76f3-4483-bc17-77cd900569ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.464805] env[61473]: DEBUG nova.compute.manager [req-12f336aa-3104-4e59-9412-7df9932e283d req-d979b5eb-c376-4c4b-b028-645b7ae906c8 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] No waiting events found dispatching network-vif-plugged-4cd955eb-1ecb-42b8-81a2-11f9a848af92 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1322.465047] env[61473]: WARNING nova.compute.manager [req-12f336aa-3104-4e59-9412-7df9932e283d req-d979b5eb-c376-4c4b-b028-645b7ae906c8 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Received unexpected event network-vif-plugged-4cd955eb-1ecb-42b8-81a2-11f9a848af92 for instance with vm_state building and task_state spawning. [ 1322.701128] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Successfully updated port: 4cd955eb-1ecb-42b8-81a2-11f9a848af92 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1322.716542] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "refresh_cache-0367d64d-76f3-4483-bc17-77cd900569ef" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.716746] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "refresh_cache-0367d64d-76f3-4483-bc17-77cd900569ef" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.716930] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1322.825305] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Instance cache missing 
network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1323.108746] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Successfully updated port: 790a47cd-41e0-42eb-a42b-dcb3996ec7d6 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1323.118608] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "refresh_cache-a650e57a-85cf-416c-8787-a4ab98d4a930" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.118746] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "refresh_cache-a650e57a-85cf-416c-8787-a4ab98d4a930" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.118889] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1323.205806] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1323.351632] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Updating instance_info_cache with network_info: [{"id": "4cd955eb-1ecb-42b8-81a2-11f9a848af92", "address": "fa:16:3e:7b:c4:22", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd955eb-1e", "ovs_interfaceid": "4cd955eb-1ecb-42b8-81a2-11f9a848af92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.369628] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "refresh_cache-0367d64d-76f3-4483-bc17-77cd900569ef" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.370038] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Instance network_info: |[{"id": "4cd955eb-1ecb-42b8-81a2-11f9a848af92", "address": "fa:16:3e:7b:c4:22", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd955eb-1e", "ovs_interfaceid": "4cd955eb-1ecb-42b8-81a2-11f9a848af92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 1323.370828] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:c4:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f246b87-f105-4b33-a71d-5caf8e99e074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cd955eb-1ecb-42b8-81a2-11f9a848af92', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1323.378760] env[61473]: DEBUG oslo.service.loopingcall [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1323.379256] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1323.379492] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0ca3daa-89a6-4fd2-a1a4-97cfa124cc33 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.402415] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1323.402415] env[61473]: value = "task-4281645" [ 1323.402415] env[61473]: _type = "Task" [ 1323.402415] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.410414] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281645, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.460549] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Updating instance_info_cache with network_info: [{"id": "790a47cd-41e0-42eb-a42b-dcb3996ec7d6", "address": "fa:16:3e:91:8d:9c", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790a47cd-41", "ovs_interfaceid": "790a47cd-41e0-42eb-a42b-dcb3996ec7d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.474213] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "refresh_cache-a650e57a-85cf-416c-8787-a4ab98d4a930" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.474859] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Instance network_info: |[{"id": "790a47cd-41e0-42eb-a42b-dcb3996ec7d6", "address": "fa:16:3e:91:8d:9c", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790a47cd-41", "ovs_interfaceid": "790a47cd-41e0-42eb-a42b-dcb3996ec7d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 1323.475074] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:8d:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f246b87-f105-4b33-a71d-5caf8e99e074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '790a47cd-41e0-42eb-a42b-dcb3996ec7d6', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1323.482891] env[61473]: DEBUG oslo.service.loopingcall [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1323.484043] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1323.484043] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea4a8483-93cd-4477-b0a4-6c597300c3bc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.504390] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1323.504390] env[61473]: value = "task-4281646" [ 1323.504390] env[61473]: _type = "Task" [ 1323.504390] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.514178] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281646, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.912472] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281645, 'name': CreateVM_Task, 'duration_secs': 0.354144} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.912632] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1323.919295] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.919473] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.919795] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1323.920056] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a243535-b7bd-4011-b073-00509309080a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.924736] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1323.924736] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5200c0a9-b88f-a3bb-816a-af090f3b3789" [ 1323.924736] env[61473]: _type = "Task" [ 1323.924736] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.933557] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5200c0a9-b88f-a3bb-816a-af090f3b3789, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.014031] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281646, 'name': CreateVM_Task, 'duration_secs': 0.328238} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.014157] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1324.014951] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.435789] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.436118] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1324.436352] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.436572] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.436877] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1324.437244] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc4f31c8-667c-46d1-868e-45d96254faa3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.441737] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1324.441737] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52ed71cd-3504-f3ed-056b-cba04c2002d8" [ 1324.441737] env[61473]: _type = "Task" [ 1324.441737] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.449666] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52ed71cd-3504-f3ed-056b-cba04c2002d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.497462] env[61473]: DEBUG nova.compute.manager [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Received event network-vif-plugged-790a47cd-41e0-42eb-a42b-dcb3996ec7d6 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1324.497706] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Acquiring lock "a650e57a-85cf-416c-8787-a4ab98d4a930-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.497852] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.498036] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.498267] env[61473]: DEBUG nova.compute.manager [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] No waiting events found dispatching network-vif-plugged-790a47cd-41e0-42eb-a42b-dcb3996ec7d6 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1324.498485] env[61473]: WARNING nova.compute.manager [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Received unexpected event network-vif-plugged-790a47cd-41e0-42eb-a42b-dcb3996ec7d6 for instance with vm_state building and task_state spawning. [ 1324.498776] env[61473]: DEBUG nova.compute.manager [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Received event network-changed-4cd955eb-1ecb-42b8-81a2-11f9a848af92 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1324.499110] env[61473]: DEBUG nova.compute.manager [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Refreshing instance network info cache due to event network-changed-4cd955eb-1ecb-42b8-81a2-11f9a848af92.
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1324.499451] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Acquiring lock "refresh_cache-0367d64d-76f3-4483-bc17-77cd900569ef" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.499700] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Acquired lock "refresh_cache-0367d64d-76f3-4483-bc17-77cd900569ef" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.499926] env[61473]: DEBUG nova.network.neutron [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Refreshing network info cache for port 4cd955eb-1ecb-42b8-81a2-11f9a848af92 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1324.775353] env[61473]: DEBUG nova.network.neutron [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Updated VIF entry in instance network info cache for port 4cd955eb-1ecb-42b8-81a2-11f9a848af92. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1324.775706] env[61473]: DEBUG nova.network.neutron [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Updating instance_info_cache with network_info: [{"id": "4cd955eb-1ecb-42b8-81a2-11f9a848af92", "address": "fa:16:3e:7b:c4:22", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd955eb-1e", "ovs_interfaceid": "4cd955eb-1ecb-42b8-81a2-11f9a848af92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.788358] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Releasing lock "refresh_cache-0367d64d-76f3-4483-bc17-77cd900569ef" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.788589] env[61473]: DEBUG nova.compute.manager [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Received 
event network-changed-790a47cd-41e0-42eb-a42b-dcb3996ec7d6 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1324.788750] env[61473]: DEBUG nova.compute.manager [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Refreshing instance network info cache due to event network-changed-790a47cd-41e0-42eb-a42b-dcb3996ec7d6. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1324.788945] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Acquiring lock "refresh_cache-a650e57a-85cf-416c-8787-a4ab98d4a930" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.789096] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Acquired lock "refresh_cache-a650e57a-85cf-416c-8787-a4ab98d4a930" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.789665] env[61473]: DEBUG nova.network.neutron [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Refreshing network info cache for port 790a47cd-41e0-42eb-a42b-dcb3996ec7d6 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1324.952920] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.953333] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1324.953419] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.118522] env[61473]: DEBUG nova.network.neutron [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Updated VIF entry in instance network info cache for port 790a47cd-41e0-42eb-a42b-dcb3996ec7d6. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1325.118874] env[61473]: DEBUG nova.network.neutron [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Updating instance_info_cache with network_info: [{"id": "790a47cd-41e0-42eb-a42b-dcb3996ec7d6", "address": "fa:16:3e:91:8d:9c", "network": {"id": "db67d9fd-0ade-4f0e-b852-d6b55a124b51", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-12385500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e0a2e7f01674740942b8185f4261d86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790a47cd-41", "ovs_interfaceid": "790a47cd-41e0-42eb-a42b-dcb3996ec7d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.128094] env[61473]: DEBUG oslo_concurrency.lockutils [req-1e1789bd-01c0-477c-a2ec-d8ecc25ce0be req-787c78e2-a586-4cbd-b200-1c8831151112 service nova] Releasing lock "refresh_cache-a650e57a-85cf-416c-8787-a4ab98d4a930" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.747263] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "a650e57a-85cf-416c-8787-a4ab98d4a930" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.824946] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "0367d64d-76f3-4483-bc17-77cd900569ef" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.927131] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.927412] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock
"64dc3dee-8479-478b-87c8-2bb0ae0f99d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.726268] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5a7e8c7e-7599-407a-b5fc-01fe20dd72a7 tempest-InstanceActionsTestJSON-171285110 tempest-InstanceActionsTestJSON-171285110-project-member] Acquiring lock "ca3c10ac-b3cf-4291-b070-42332b304686" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.726777] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5a7e8c7e-7599-407a-b5fc-01fe20dd72a7 tempest-InstanceActionsTestJSON-171285110 tempest-InstanceActionsTestJSON-171285110-project-member] Lock "ca3c10ac-b3cf-4291-b070-42332b304686" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.965642] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1348.965906] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1350.966362] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.966617] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.978251] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.978474] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.978641] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.978797] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute 
resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1350.979913] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8ad5af-946d-4b44-9f91-ebcfd4226a8a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.988539] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950a1191-4578-4287-9516-4e975809a9b3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.002588] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7de7540-f79e-4dc8-b807-29263881a526 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.008964] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee70de7-2c4f-4a41-a319-44bb59f642fd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.039747] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180643MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1351.039906] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.040198] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.116550] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 3a350a34-7728-493f-a737-7a6a3071363e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.116713] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.116843] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.116970] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.117159] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.117290] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.117409] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.117528] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.117645] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.117760] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.131710] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.143568] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 271c4f52-31ac-43ea-9bfb-5adf561684c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.154125] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.163587] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.172882] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance eb01e325-e0f5-4eee-8e3c-22d7389589a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.182051] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 83f00fd0-b61d-42c5-9232-a26da89f7b18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.193839] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8d8a25ed-cec9-4736-be45-0d41b62028ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.204328] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.214395] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ca3c10ac-b3cf-4291-b070-42332b304686 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.214628] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1351.214775] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1351.416599] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d44ecf-d018-45a8-8fb9-4731f8c8cb75 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.424105] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b744b621-6193-495c-88b9-df3a0aa19c93 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.453971] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebf9ca4-f571-4a3b-8333-fa8ad7f254b4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.460885] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad85a3f-3eba-44ae-ad25-b08d17d06b33 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.473651] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.482370] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1351.498308] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1351.498498] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.458s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.498460] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.498713] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.498879] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.967079] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.967261] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1352.967388] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1352.987478] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.987625] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.987759] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.987889] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.988023] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.988151] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.988271] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.988388] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.988507] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.988622] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1352.988741] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1352.989208] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.984918] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.962022] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.343532] env[61473]: WARNING oslo_vmware.rw_handles [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1365.343532] env[61473]: ERROR oslo_vmware.rw_handles [ 1365.344109] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1365.345854] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1365.346133] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 
[ 1365.346133] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Copying Virtual Disk [datastore2] vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/45350cc9-6e0d-4b03-9034-6c8f72be6de2/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1365.346499] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edc5e32c-6c10-4433-833f-ef246c701e68 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1365.354433] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){
[ 1365.354433] env[61473]:   value = "task-4281647"
[ 1365.354433] env[61473]:   _type = "Task"
[ 1365.354433] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1365.362335] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281647, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1365.866057] env[61473]: DEBUG oslo_vmware.exceptions [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1365.866057] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1365.866551] env[61473]: ERROR nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1365.866551] env[61473]: Faults: ['InvalidArgument']
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Traceback (most recent call last):
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     yield resources
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self.driver.spawn(context, instance, image_meta,
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self._fetch_image_if_missing(context, vi)
[ 1365.866551] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     image_cache(vi, tmp_image_ds_loc)
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     vm_util.copy_virtual_disk(
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     session._wait_for_task(vmdk_copy_task)
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     return self.wait_for_task(task_ref)
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     return evt.wait()
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     result = hub.switch()
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1365.869259] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     return self.greenlet.switch()
[ 1365.869711] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1365.869711] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self.f(*self.args, **self.kw)
[ 1365.869711] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1365.869711] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     raise exceptions.translate_fault(task_info.error)
[ 1365.869711] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1365.869711] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Faults: ['InvalidArgument']
[ 1365.869711] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]
[ 1365.869711] env[61473]: INFO nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Terminating instance
[ 1365.869711] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1365.870106] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1365.870106] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58f236f2-cb4b-4dc8-a15e-e2c510630aa7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
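The failed CopyVirtualDisk_Task above surfaces through the generic task-polling loop visible in the traceback: oslo_vmware waits on the task via a looping call and, when the task ends in the error state, raises a fault translated from task_info.error. A simplified stand-in for that loop (poll(), the info object, and VimFault are illustrative, not the oslo_vmware API):

import time

class VimFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(poll, interval=0.5):
    # poll() is assumed to return an object with .state
    # ('running'/'success'/'error'), .progress and .error.
    while True:
        info = poll()
        if info.state == "success":
            return info
        if info.state == "error":
            # Mirrors _poll_task raising translate_fault(task_info.error);
            # this is what becomes "InvalidArgument: fileType" above.
            raise VimFault(info.error)
        # Still running: each pass corresponds to a "progress is N%" line.
        time.sleep(interval)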
[ 1365.870982] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1365.871193] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1365.871976] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfd8d5b-0801-4d41-b785-b6945eebd250 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1365.878541] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1365.878764] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9217d4ce-df27-4b9f-8e82-47ea3d8047a8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1365.880849] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1365.881035] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1365.881953] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e3b332-2aae-4f36-bfbd-09ab9b9edff4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1365.886749] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Waiting for the task: (returnval){
[ 1365.886749] env[61473]:   value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52d76126-17fe-4f35-0ea4-62657710b33e"
[ 1365.886749] env[61473]:   _type = "Task"
[ 1365.886749] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1365.893631] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52d76126-17fe-4f35-0ea4-62657710b33e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1366.017047] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1366.017242] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1366.017556] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleting the datastore file [datastore2] 3a350a34-7728-493f-a737-7a6a3071363e {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1366.017843] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b0acb2b-b6e1-4f77-b0c5-34e208979180 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.024194] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){
[ 1366.024194] env[61473]:   value = "task-4281649"
[ 1366.024194] env[61473]:   _type = "Task"
[ 1366.024194] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
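The destroy path above follows a fixed order: the VM is first unregistered from the vCenter inventory (which leaves its files in place), and only then is its datastore directory deleted. Schematically, with stand-in names rather than the vmops API:

def destroy_instance(vm, datastore_dir):
    # 1. UnregisterVM removes the VM from the vCenter inventory but leaves
    #    its files on the datastore, hence "Unregistered the VM" first.
    vm.unregister()
    # 2. DeleteDatastoreFile_Task then removes the on-disk directory, hence
    #    "Deleting contents of the VM from datastore" afterwards.
    datastore_dir.delete()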
[ 1366.031541] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281649, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1366.397502] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1366.397764] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Creating directory with path [datastore2] vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1366.397996] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7053a36-de6d-4965-8369-83084b59dd06 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.410799] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Created directory with path [datastore2] vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1366.410979] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Fetch image to [datastore2] vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1366.411168] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1366.411915] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a290a8-9bd3-4328-a811-b941ae4b2b54 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.418271] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddfa2e0-0f21-41db-b078-b4869475cd3b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.427115] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62af4c4-b8f4-44e9-9369-24f94e1ed43c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.456689] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01fbe09-5ea3-481f-b86e-19ff79eb56c3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.461987] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a08f203a-4a0a-48b3-b8d5-2e98243146ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.482769] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1366.533692] env[61473]: DEBUG oslo_vmware.api [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09492} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1366.533889] env[61473]: DEBUG oslo_vmware.rw_handles [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1366.535244] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1366.535456] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1366.535671] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1366.535849] env[61473]: INFO nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Took 0.66 seconds to destroy the instance on the hypervisor.
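The records above show a second request (442f43ff) starting a fresh image fetch for the same image: download into a per-request vmware_temp directory first, then copy the sparse disk into the shared devstack-image-cache_base. A sketch of that cache-miss flow (the ds helpers and path layout are illustrative stand-ins, not the vmops API):

import uuid

def fetch_image_if_missing(ds, image_id):
    cached = f"devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    if ds.exists(cached):
        return cached
    # Download from Glance into a per-request temp dir first, so a failed
    # or concurrent fetch never leaves a half-written file at the cache path.
    tmp = f"vmware_temp/{uuid.uuid4()}/{image_id}/tmp-sparse.vmdk"
    ds.upload_from_glance(image_id, tmp)
    # Then copy the sparse disk to its final cache location -- this is the
    # CopyVirtualDisk_Task step that failed with InvalidArgument above.
    ds.copy_disk(tmp, cached)
    return cached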
[ 1366.538496] env[61473]: DEBUG nova.compute.claims [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1366.538664] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1366.538922] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1366.599548] env[61473]: DEBUG oslo_vmware.rw_handles [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1366.599759] env[61473]: DEBUG oslo_vmware.rw_handles [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1366.823694] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110b97e6-e05b-49d7-a72e-7a8016bfab2b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.833324] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a49442-f1fc-476a-9549-d21e5b8e07f3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.877901] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902b2a79-775a-47bb-9f5a-b5499bb1e033 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.884526] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6714a0e-5c74-4aed-9222-ef994b05c0b3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1366.897070] env[61473]: DEBUG nova.compute.provider_tree [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1366.905694] env[61473]: DEBUG nova.scheduler.client.report [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1366.919403] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.380s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
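The inventory dict logged above is what determines schedulable capacity on this node: placement computes effective capacity per resource class as (total - reserved) * allocation_ratio. A quick check of the numbers in the log:

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 329, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Effective schedulable capacity per resource class.
    effective = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, effective)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 329.0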
[ 1366.919907] env[61473]: ERROR nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1366.919907] env[61473]: Faults: ['InvalidArgument']
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Traceback (most recent call last):
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self.driver.spawn(context, instance, image_meta,
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self._fetch_image_if_missing(context, vi)
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     image_cache(vi, tmp_image_ds_loc)
[ 1366.919907] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     vm_util.copy_virtual_disk(
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     session._wait_for_task(vmdk_copy_task)
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     return self.wait_for_task(task_ref)
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     return evt.wait()
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     result = hub.switch()
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     return self.greenlet.switch()
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1366.920310] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     self.f(*self.args, **self.kw)
[ 1366.920625] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1366.920625] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]     raise exceptions.translate_fault(task_info.error)
[ 1366.920625] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1366.920625] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Faults: ['InvalidArgument']
[ 1366.920625] env[61473]: ERROR nova.compute.manager [instance: 3a350a34-7728-493f-a737-7a6a3071363e]
[ 1366.920625] env[61473]: DEBUG nova.compute.utils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1366.922270] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Build of instance 3a350a34-7728-493f-a737-7a6a3071363e was re-scheduled: A specified parameter was not correct: fileType
[ 1366.922270] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}}
[ 1366.922646] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}}
[ 1366.922819] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}}
[ 1366.922992] env[61473]: DEBUG nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1366.923176] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1367.248971] env[61473]: DEBUG nova.network.neutron [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1367.264640] env[61473]: INFO nova.compute.manager [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Took 0.34 seconds to deallocate network for instance.
[ 1367.365636] env[61473]: INFO nova.scheduler.client.report [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleted allocations for instance 3a350a34-7728-493f-a737-7a6a3071363e
[ 1367.385034] env[61473]: DEBUG oslo_concurrency.lockutils [None req-990ab223-f6c7-436f-9b03-cdb3d5ef1297 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "3a350a34-7728-493f-a737-7a6a3071363e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 637.842s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1367.386157] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "3a350a34-7728-493f-a737-7a6a3071363e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 435.216s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1367.386386] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "3a350a34-7728-493f-a737-7a6a3071363e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1367.386626] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "3a350a34-7728-493f-a737-7a6a3071363e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1367.386802] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "3a350a34-7728-493f-a737-7a6a3071363e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1367.388688] env[61473]: INFO nova.compute.manager [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Terminating instance
[ 1367.390445] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1367.390637] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1367.390713] env[61473]: DEBUG nova.network.neutron [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1367.399611] env[61473]: DEBUG nova.compute.manager [None req-042d6f0a-a899-4694-a201-e5abe67715bf tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 7e96360f-c62a-474e-a73e-9d7db6384987] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1367.423531] env[61473]: DEBUG nova.network.neutron [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
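The lockutils lines above record both how long each caller waited for a lock and how long it was held (here, terminate waited 435.216s while the build held the instance lock for 637.842s). A small self-contained equivalent of that instrumentation, using plain threading rather than oslo.concurrency:

import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()                      # blocks, like the "Acquiring lock" line
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')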
[ 1367.426545] env[61473]: DEBUG nova.compute.manager [None req-042d6f0a-a899-4694-a201-e5abe67715bf tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 7e96360f-c62a-474e-a73e-9d7db6384987] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}}
[ 1367.461056] env[61473]: DEBUG oslo_concurrency.lockutils [None req-042d6f0a-a899-4694-a201-e5abe67715bf tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "7e96360f-c62a-474e-a73e-9d7db6384987" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 233.579s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1367.471937] env[61473]: DEBUG nova.compute.manager [None req-dcbc0336-2056-4e90-80a8-ad4849c11c1b tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] [instance: 8886a746-98cf-465d-b869-ebbe734ffa3c] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1367.503054] env[61473]: DEBUG nova.compute.manager [None req-dcbc0336-2056-4e90-80a8-ad4849c11c1b tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] [instance: 8886a746-98cf-465d-b869-ebbe734ffa3c] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}}
[ 1367.528024] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dcbc0336-2056-4e90-80a8-ad4849c11c1b tempest-AttachVolumeShelveTestJSON-1974781682 tempest-AttachVolumeShelveTestJSON-1974781682-project-member] Lock "8886a746-98cf-465d-b869-ebbe734ffa3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 220.841s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1367.537795] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1367.593323] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1367.593323] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1367.593666] env[61473]: INFO nova.compute.claims [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1367.596862] env[61473]: DEBUG nova.network.neutron [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1367.604453] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "refresh_cache-3a350a34-7728-493f-a737-7a6a3071363e" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
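The "Claim successful" line above pairs with the earlier "Aborting claim:" for the failed build: the resource tracker reserves resources before spawning and must hand them back if the build throws. The shape of that pattern, with stand-in names for the ResourceTracker methods:

from contextlib import contextmanager

@contextmanager
def instance_claim(tracker, instance):
    claim = tracker.claim(instance)   # logs "Claim successful on node ..."
    try:
        yield claim
    except Exception:
        tracker.abort(claim)          # logs "Aborting claim:" on spawn failure
        raise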
[ 1367.604825] env[61473]: DEBUG nova.compute.manager [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1367.605024] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1367.605528] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83355ba1-722b-4d8e-a3ae-7796309cce7c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1367.616237] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b557819-020f-46d2-9894-718da38aeb27 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1367.647082] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a350a34-7728-493f-a737-7a6a3071363e could not be found.
[ 1367.647280] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1367.647458] env[61473]: INFO nova.compute.manager [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1367.647698] env[61473]: DEBUG oslo.service.loopingcall [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1367.647915] env[61473]: DEBUG nova.compute.manager [-] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1367.648025] env[61473]: DEBUG nova.network.neutron [-] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1367.668026] env[61473]: DEBUG nova.network.neutron [-] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1367.675373] env[61473]: DEBUG nova.network.neutron [-] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1367.684398] env[61473]: INFO nova.compute.manager [-] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] Took 0.04 seconds to deallocate network for instance.
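The WARNING above shows the terminate racing with the earlier reschedule cleanup: the backend VM is already gone, and the driver treats InstanceNotFound as a successful destroy rather than failing the delete. Schematically (InstanceNotFound stands in for nova.exception.InstanceNotFound; the lookup helper is hypothetical):

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(vm_ref_lookup, instance_uuid):
    try:
        vm_ref = vm_ref_lookup(instance_uuid)
    except InstanceNotFound:
        # Already removed by a previous cleanup; deleting nothing is still
        # a successful delete, hence "Instance destroyed" right after the
        # WARNING in the log.
        return
    # ... otherwise unregister the VM and delete its files as above ...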
[ 1367.818880] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ebbec599-90ac-4aea-8470-47da738bbaf5 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "3a350a34-7728-493f-a737-7a6a3071363e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.432s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1367.819594] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "3a350a34-7728-493f-a737-7a6a3071363e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 85.630s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1367.819787] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 3a350a34-7728-493f-a737-7a6a3071363e] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1367.820268] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "3a350a34-7728-493f-a737-7a6a3071363e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1367.886792] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e286a511-2fc9-42b3-a08d-d3c72ada605c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1367.894448] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2909ea1-413f-489d-b844-3a4509386e33 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1367.925591] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a74b381-7e35-4fa5-aca4-d8541fbe895b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1367.932785] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4698ce1e-2057-4860-8910-7987928adb0c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1367.946816] env[61473]: DEBUG nova.compute.provider_tree [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1367.955956] env[61473]: DEBUG nova.scheduler.client.report [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1367.969522] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.377s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1367.969939] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}}
[ 1368.003468] env[61473]: DEBUG nova.compute.utils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1368.004890] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 1368.005078] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1368.012964] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}}
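"Using /dev/sd instead of None" above is the block-device-mapping code falling back to a default device prefix when the request named none, after which free names are handed out in order (/dev/sda, /dev/sdb, ...). A toy version of that selection, not Nova's actual helper:

import string

def next_device_name(in_use, prefix="/dev/sd"):
    # With no device name requested, fall back to the /dev/sd prefix and
    # pick the first unused letter.
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in in_use:
            return candidate
    raise ValueError("no free device names")

print(next_device_name({"/dev/sda"}))  # -> /dev/sdb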
[ 1368.075682] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}}
[ 1368.080363] env[61473]: DEBUG nova.policy [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa72838d6ec74c2ebac9d403f5ac1cf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b5fd5d032e047b8b77b2b727a03f01c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1368.100956] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=<?>,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T02:07:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1368.101209] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1368.101373] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1368.101554] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1368.101700] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1368.101847] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1368.102088] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1368.102301] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1368.102481] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1368.102647] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1368.102822] env[61473]: DEBUG nova.virt.hardware [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1368.103678] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63bed8a0-ceaa-4cea-bd8d-837ef4fc3769 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1368.111478] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a33607-1c67-4573-b876-2012c8566840 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1368.436761] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Successfully created port: 8e8bdcd9-adfc-44e2-801a-7965d26257b5 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1369.354843] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Successfully updated port: 8e8bdcd9-adfc-44e2-801a-7965d26257b5 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1369.369690] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "refresh_cache-21e47c1d-d2be-427c-8b09-4e8da3df126b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
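The topology records above walk the flavor/image limits (all unset, so the 65536 maximums apply) down to the only layout possible for one vCPU. The enumeration they describe, in miniature; a sketch, not nova.virt.hardware itself:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count, within the limits; for vcpus=1 the only result is (1, 1, 1).
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))  # -> [(1, 1, 1)], as logged above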
tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "refresh_cache-21e47c1d-d2be-427c-8b09-4e8da3df126b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.369690] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1369.378787] env[61473]: DEBUG nova.compute.manager [req-85b9d89d-791e-4452-b6bd-7a2a9e99dc72 req-6d9894b4-2cee-4884-8458-51355babaf64 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Received event network-vif-plugged-8e8bdcd9-adfc-44e2-801a-7965d26257b5 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1369.379023] env[61473]: DEBUG oslo_concurrency.lockutils [req-85b9d89d-791e-4452-b6bd-7a2a9e99dc72 req-6d9894b4-2cee-4884-8458-51355babaf64 service nova] Acquiring lock "21e47c1d-d2be-427c-8b09-4e8da3df126b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.379287] env[61473]: DEBUG oslo_concurrency.lockutils [req-85b9d89d-791e-4452-b6bd-7a2a9e99dc72 req-6d9894b4-2cee-4884-8458-51355babaf64 service nova] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.379422] env[61473]: DEBUG oslo_concurrency.lockutils [req-85b9d89d-791e-4452-b6bd-7a2a9e99dc72 req-6d9894b4-2cee-4884-8458-51355babaf64 service nova] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.379536] env[61473]: DEBUG nova.compute.manager [req-85b9d89d-791e-4452-b6bd-7a2a9e99dc72 req-6d9894b4-2cee-4884-8458-51355babaf64 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] No waiting events found dispatching network-vif-plugged-8e8bdcd9-adfc-44e2-801a-7965d26257b5 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1369.379701] env[61473]: WARNING nova.compute.manager [req-85b9d89d-791e-4452-b6bd-7a2a9e99dc72 req-6d9894b4-2cee-4884-8458-51355babaf64 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Received unexpected event network-vif-plugged-8e8bdcd9-adfc-44e2-801a-7965d26257b5 for instance with vm_state building and task_state spawning. [ 1369.431531] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1369.668826] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Updating instance_info_cache with network_info: [{"id": "8e8bdcd9-adfc-44e2-801a-7965d26257b5", "address": "fa:16:3e:07:f1:f9", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e8bdcd9-ad", "ovs_interfaceid": "8e8bdcd9-adfc-44e2-801a-7965d26257b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.680816] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "refresh_cache-21e47c1d-d2be-427c-8b09-4e8da3df126b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.681125] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Instance network_info: |[{"id": "8e8bdcd9-adfc-44e2-801a-7965d26257b5", "address": "fa:16:3e:07:f1:f9", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e8bdcd9-ad", "ovs_interfaceid": "8e8bdcd9-adfc-44e2-801a-7965d26257b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 1369.681539] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:f1:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e8bdcd9-adfc-44e2-801a-7965d26257b5', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.690366] env[61473]: DEBUG oslo.service.loopingcall [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1369.690828] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1369.691090] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d30d346-523c-4e46-87d7-7d57d5ec8b58 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.717119] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.717119] env[61473]: value = "task-4281650" [ 1369.717119] env[61473]: _type = "Task" [ 1369.717119] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.724878] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281650, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.231955] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281650, 'name': CreateVM_Task, 'duration_secs': 0.288972} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.231955] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1370.231955] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.231955] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.232380] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1370.232778] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b537e2bf-3e6c-4d05-a140-1aee318b3293 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.239771] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 1370.239771] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52dd7615-ebb2-cfb4-a1c8-200d7853704f" [ 1370.239771] env[61473]: _type = "Task" [ 1370.239771] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.253822] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52dd7615-ebb2-cfb4-a1c8-200d7853704f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.748025] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.748331] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.748431] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.403922] env[61473]: DEBUG nova.compute.manager [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Received event network-changed-8e8bdcd9-adfc-44e2-801a-7965d26257b5 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1371.403922] env[61473]: DEBUG nova.compute.manager [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Refreshing instance network info cache due to event network-changed-8e8bdcd9-adfc-44e2-801a-7965d26257b5. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1371.403985] env[61473]: DEBUG oslo_concurrency.lockutils [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] Acquiring lock "refresh_cache-21e47c1d-d2be-427c-8b09-4e8da3df126b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.404120] env[61473]: DEBUG oslo_concurrency.lockutils [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] Acquired lock "refresh_cache-21e47c1d-d2be-427c-8b09-4e8da3df126b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.404327] env[61473]: DEBUG nova.network.neutron [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Refreshing network info cache for port 8e8bdcd9-adfc-44e2-801a-7965d26257b5 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1371.664080] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "21e47c1d-d2be-427c-8b09-4e8da3df126b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.741514] env[61473]: DEBUG nova.network.neutron [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Updated VIF entry in instance network info cache for port 8e8bdcd9-adfc-44e2-801a-7965d26257b5. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1371.741880] env[61473]: DEBUG nova.network.neutron [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Updating instance_info_cache with network_info: [{"id": "8e8bdcd9-adfc-44e2-801a-7965d26257b5", "address": "fa:16:3e:07:f1:f9", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e8bdcd9-ad", "ovs_interfaceid": "8e8bdcd9-adfc-44e2-801a-7965d26257b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.752821] env[61473]: DEBUG oslo_concurrency.lockutils [req-6eb46d60-cbd5-481e-b2a8-67d2e345a2f0 req-53faa92f-9379-4397-985f-7cbbc6533824 service nova] Releasing lock "refresh_cache-21e47c1d-d2be-427c-8b09-4e8da3df126b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.344214] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.344694] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.966460] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.966849] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1411.967281] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.966792] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.966988] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1412.967129] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1412.995728] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996062] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996062] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996149] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996252] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996391] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996541] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996663] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996781] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.996900] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1412.997034] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1412.997540] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.997729] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.008686] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.008875] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.009406] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.009406] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1413.010365] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a19cd1b-d907-4bb4-be0c-70836ee5618d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.019627] env[61473]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dca1649-24f9-4296-b890-038f660bc0a9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.034900] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d217a71-4bea-4919-b015-19824d239efc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.040946] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b724868b-8ea2-44f0-a216-6672e52c9e9b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.069393] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180621MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1413.069532] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.069717] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.149018] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.149203] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 9031b0d9-4e07-4afa-a597-770b80df2511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.149333] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.149456] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.149575] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.149693] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.149810] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.149927] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.150053] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.150172] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.163413] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.173547] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.183028] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance eb01e325-e0f5-4eee-8e3c-22d7389589a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.193552] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 83f00fd0-b61d-42c5-9232-a26da89f7b18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.203377] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8d8a25ed-cec9-4736-be45-0d41b62028ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.213321] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.223685] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ca3c10ac-b3cf-4291-b070-42332b304686 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.234340] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.234598] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1413.234747] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1413.424678] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33fca0ce-39d1-4744-8f96-0006ec7b933c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.432468] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6120a13d-ea9f-4da6-bfab-624d631121a7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.462323] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ce3144-c3f0-45ff-9474-5180035d9f38 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.469150] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb7cc22-24ba-4483-a786-5976b90ca8e9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.482303] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.490901] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1413.505123] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1413.505270] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.436s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.474363] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.474680] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.966464] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.963468] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.202968] env[61473]: WARNING oslo_vmware.rw_handles [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1416.202968] env[61473]: ERROR oslo_vmware.rw_handles [ 1416.203377] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1416.206084] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1416.206436] env[61473]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Copying Virtual Disk [datastore2] vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/ff8948de-4129-40e3-8823-2901b1b7a619/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1416.206753] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa5988f0-a10d-48fb-9ee2-a27fee7dab0b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.215318] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Waiting for the task: (returnval){ [ 1416.215318] env[61473]: value = "task-4281651" [ 1416.215318] env[61473]: _type = "Task" [ 1416.215318] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.223643] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Task: {'id': task-4281651, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.725662] env[61473]: DEBUG oslo_vmware.exceptions [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1416.726052] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.726681] env[61473]: ERROR nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1416.726681] env[61473]: Faults: ['InvalidArgument'] [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Traceback (most recent call last): [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] yield resources [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self.driver.spawn(context, instance, image_meta, [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self._fetch_image_if_missing(context, vi) [ 1416.726681] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] image_cache(vi, tmp_image_ds_loc) [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] vm_util.copy_virtual_disk( [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] session._wait_for_task(vmdk_copy_task) [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] return self.wait_for_task(task_ref) [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] return evt.wait() [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] result = hub.switch() [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1416.727068] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] return self.greenlet.switch() [ 1416.727372] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1416.727372] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self.f(*self.args, **self.kw) [ 1416.727372] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1416.727372] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] raise exceptions.translate_fault(task_info.error) [ 1416.727372] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1416.727372] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Faults: ['InvalidArgument'] [ 1416.727372] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] [ 1416.727372] env[61473]: INFO nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Terminating instance [ 1416.728627] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.728835] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1416.729088] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff45b0ef-d6d0-430f-b689-ca75a09f0445 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.731233] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1416.731428] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1416.732146] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b7702e-6140-4755-9265-bf8a5fca927f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.738724] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1416.738937] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a79c43be-c445-4992-a650-c55deb077045 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.740962] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1416.741153] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1416.742082] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba734cd7-3657-499a-953c-3a20ae57a64e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.746776] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1416.746776] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52040e10-dd88-8aee-5a75-bd6afcd39efd" [ 1416.746776] env[61473]: _type = "Task" [ 1416.746776] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.753706] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52040e10-dd88-8aee-5a75-bd6afcd39efd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.812921] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1416.813163] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1416.813362] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Deleting the datastore file [datastore2] 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1416.813623] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-baed51a1-73ca-474d-97af-41d91f85c134 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.820048] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Waiting for the task: (returnval){ [ 1416.820048] env[61473]: value = "task-4281653" [ 1416.820048] env[61473]: _type = "Task" [ 1416.820048] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.828493] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Task: {'id': task-4281653, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.257574] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1417.257889] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating directory with path [datastore2] vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.258023] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bed452c-13f8-4e35-a345-156d3e9ba350 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.273615] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created directory with path [datastore2] vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.273799] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Fetch image to [datastore2] vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1417.273972] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1417.274701] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0e5599-1db2-43b1-b1d5-10471948e47e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.280837] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4469b03-967b-46fc-b899-ba48fb493039 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.289579] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d64e8b-0971-4e0b-88d5-237e307450e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.319621] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ab0f58-2a94-4af7-9418-9d13990c6a2a {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.329905] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f55d5746-4a00-4520-9a73-91f861ca57bb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.331480] env[61473]: DEBUG oslo_vmware.api [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Task: {'id': task-4281653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064698} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.331709] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1417.331884] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1417.332067] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1417.332243] env[61473]: INFO nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1417.334450] env[61473]: DEBUG nova.compute.claims [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1417.334710] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.335011] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.352997] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1417.405540] env[61473]: DEBUG oslo_vmware.rw_handles [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1417.467751] env[61473]: DEBUG oslo_vmware.rw_handles [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1417.467823] env[61473]: DEBUG oslo_vmware.rw_handles [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1417.641663] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abcb947-6c52-46f8-aeb2-7ae26fb26ccf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.649655] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ee77ed-34d0-4c0d-a2c7-bc9470f33fbf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.679752] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4f62f8-3532-47e5-8c12-66dbea5bf623 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.687041] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e8a9cf-3f56-4ddb-969a-668446af83da {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.700354] env[61473]: DEBUG nova.compute.provider_tree [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.709070] env[61473]: DEBUG nova.scheduler.client.report [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1417.723332] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.388s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.723855] env[61473]: ERROR nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1417.723855] env[61473]: Faults: ['InvalidArgument'] [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Traceback (most recent call last): [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1417.723855] env[61473]: ERROR 
nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self.driver.spawn(context, instance, image_meta, [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self._fetch_image_if_missing(context, vi) [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] image_cache(vi, tmp_image_ds_loc) [ 1417.723855] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] vm_util.copy_virtual_disk( [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] session._wait_for_task(vmdk_copy_task) [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] return self.wait_for_task(task_ref) [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] return evt.wait() [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] result = hub.switch() [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] return self.greenlet.switch() [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1417.724165] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] self.f(*self.args, **self.kw) [ 1417.724717] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1417.724717] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] raise exceptions.translate_fault(task_info.error) [ 1417.724717] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1417.724717] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Faults: ['InvalidArgument'] [ 1417.724717] env[61473]: ERROR nova.compute.manager [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] [ 1417.724717] env[61473]: DEBUG nova.compute.utils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1417.726396] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Build of instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 was re-scheduled: A specified parameter was not correct: fileType [ 1417.726396] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1417.726813] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1417.726942] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1417.727155] env[61473]: DEBUG nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1417.727321] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1418.063929] env[61473]: DEBUG nova.network.neutron [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.076814] env[61473]: INFO nova.compute.manager [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Took 0.35 seconds to deallocate network for instance. [ 1418.189720] env[61473]: INFO nova.scheduler.client.report [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Deleted allocations for instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 [ 1418.218098] env[61473]: DEBUG oslo_concurrency.lockutils [None req-56026074-167a-4d0d-ad35-ed5e237f4f3c tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 574.409s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.219159] env[61473]: DEBUG oslo_concurrency.lockutils [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 378.257s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.219405] env[61473]: DEBUG oslo_concurrency.lockutils [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Acquiring lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.220130] env[61473]: DEBUG oslo_concurrency.lockutils [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.220357] env[61473]: DEBUG oslo_concurrency.lockutils [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.222567] env[61473]: INFO nova.compute.manager [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Terminating instance [ 1418.224540] env[61473]: DEBUG nova.compute.manager [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1418.224741] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1418.225239] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa369ec4-61a9-4672-a922-5fda367313b6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.232580] env[61473]: DEBUG nova.compute.manager [None req-411606f2-4226-45b1-8326-2edfcd455c2d tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: 271c4f52-31ac-43ea-9bfb-5adf561684c1] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1418.239246] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131d21ac-0a56-4198-aa75-f29748f360e8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.255919] env[61473]: DEBUG nova.compute.manager [None req-411606f2-4226-45b1-8326-2edfcd455c2d tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: 271c4f52-31ac-43ea-9bfb-5adf561684c1] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1418.269437] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3 could not be found. 
[ 1418.269693] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1418.269798] env[61473]: INFO nova.compute.manager [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1418.270050] env[61473]: DEBUG oslo.service.loopingcall [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.272101] env[61473]: DEBUG nova.compute.manager [-] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1418.272205] env[61473]: DEBUG nova.network.neutron [-] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1418.282080] env[61473]: DEBUG oslo_concurrency.lockutils [None req-411606f2-4226-45b1-8326-2edfcd455c2d tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "271c4f52-31ac-43ea-9bfb-5adf561684c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.221s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.292533] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1418.301727] env[61473]: DEBUG nova.network.neutron [-] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.311930] env[61473]: INFO nova.compute.manager [-] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] Took 0.04 seconds to deallocate network for instance. 
[ 1418.344026] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.344129] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.345525] env[61473]: INFO nova.compute.claims [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1418.411151] env[61473]: DEBUG oslo_concurrency.lockutils [None req-80314040-4900-4218-a1cd-5c0712a21e32 tempest-ServerActionsTestJSON-333932757 tempest-ServerActionsTestJSON-333932757-project-member] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.412009] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 136.223s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.412212] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 442f43ff-5589-4c3d-a7c1-e36cb24dcfa3] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1418.412384] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "442f43ff-5589-4c3d-a7c1-e36cb24dcfa3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.584309] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d00888-bee0-4615-8a81-4785cf4b3f93 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.591903] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cb4c71-b776-4a50-8e50-2fb8834dc9fc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.621528] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad299f8-1376-49ee-a2c0-7b422c24c5fe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.628495] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86af9378-a7fe-4b15-9440-ae8e7d47bea3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.642050] env[61473]: DEBUG nova.compute.provider_tree [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1418.650331] env[61473]: DEBUG nova.scheduler.client.report [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1418.663871] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.664329] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1418.717418] env[61473]: DEBUG nova.compute.utils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1418.718702] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Not allocating networking since 'none' was specified. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1418.730706] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1418.822256] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1418.847842] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1418.848107] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1418.848273] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1418.848457] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1418.848605] env[61473]: DEBUG nova.virt.hardware [None 
req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1418.848752] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1418.849066] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1418.849255] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1418.849446] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1418.849587] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1418.849761] env[61473]: DEBUG nova.virt.hardware [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1418.850634] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a0b246-7c53-4324-b9ff-778a49ee2465 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.859191] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf014bea-fdb2-4a36-b094-aa461e9febd0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.872726] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance VIF info [] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1418.879035] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Creating folder: Project (10dcaea0a7bb4f289692fd17d6bd54eb). Parent ref: group-v843485. 
{{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1418.879319] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c713ef4e-4370-4be1-ae6a-ecba7d841ebf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.888492] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Created folder: Project (10dcaea0a7bb4f289692fd17d6bd54eb) in parent group-v843485. [ 1418.888681] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Creating folder: Instances. Parent ref: group-v843563. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1418.888897] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d547472e-70e2-4687-82ce-7176e6d6a72e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.898088] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Created folder: Instances in parent group-v843563. [ 1418.898319] env[61473]: DEBUG oslo.service.loopingcall [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.898497] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1418.898686] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01f11321-caa6-4804-a280-6030c490566b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.914279] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1418.914279] env[61473]: value = "task-4281656" [ 1418.914279] env[61473]: _type = "Task" [ 1418.914279] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.921243] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281656, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.423864] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281656, 'name': CreateVM_Task, 'duration_secs': 0.262446} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.424994] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1419.424994] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.425129] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.425446] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1419.425705] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-648d4369-9bb6-4657-b504-0eb7b9bc11b6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.430042] env[61473]: DEBUG oslo_vmware.api [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Waiting for the task: (returnval){ [ 1419.430042] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]528c47f3-f535-ab58-3618-a1396c805c0b" [ 1419.430042] env[61473]: _type = "Task" [ 1419.430042] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.437234] env[61473]: DEBUG oslo_vmware.api [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]528c47f3-f535-ab58-3618-a1396c805c0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.945274] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.945561] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1419.945796] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.420963] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.421261] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.212851] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "b3a2455d-eeb2-4681-94a7-69951a17b79f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.743809] env[61473]: WARNING oslo_vmware.rw_handles [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles 
response.begin() [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1466.743809] env[61473]: ERROR oslo_vmware.rw_handles [ 1466.744418] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1466.746300] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1466.746587] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Copying Virtual Disk [datastore2] vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/824a33ab-05b1-4a3b-8fc3-561a071de36d/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1466.746928] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21d9508e-05f1-43d3-9be2-e30d1102d1a5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.755066] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1466.755066] env[61473]: value = "task-4281657" [ 1466.755066] env[61473]: _type = "Task" [ 1466.755066] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.762953] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281657, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.266415] env[61473]: DEBUG oslo_vmware.exceptions [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1467.266804] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.267434] env[61473]: ERROR nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1467.267434] env[61473]: Faults: ['InvalidArgument'] [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Traceback (most recent call last): [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] yield resources [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self.driver.spawn(context, instance, image_meta, [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self._fetch_image_if_missing(context, vi) [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1467.267434] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] image_cache(vi, tmp_image_ds_loc) [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] vm_util.copy_virtual_disk( [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] session._wait_for_task(vmdk_copy_task) [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] return self.wait_for_task(task_ref) [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] return evt.wait() [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] result = hub.switch() [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] return self.greenlet.switch() [ 1467.267802] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1467.268223] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self.f(*self.args, **self.kw) [ 1467.268223] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1467.268223] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] raise exceptions.translate_fault(task_info.error) [ 1467.268223] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1467.268223] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Faults: ['InvalidArgument'] [ 1467.268223] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] [ 1467.268223] env[61473]: INFO nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Terminating instance [ 1467.270365] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.274031] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1467.274031] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Start destroying the 
instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1467.274031] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1467.274031] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38a1fab4-d738-4f98-9d63-902598443190 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.274543] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40302e30-5011-4fba-a6e7-eae84e8d381c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.281434] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1467.282521] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa57bf69-06dc-4315-a67b-1b0e0404cce3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.283987] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1467.284345] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1467.285035] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7d8c9c2-faa4-4b96-86cd-03117ec0c156 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.290678] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Waiting for the task: (returnval){ [ 1467.290678] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52ad2f2e-8722-b7b6-1570-22244ebcbc8f" [ 1467.290678] env[61473]: _type = "Task" [ 1467.290678] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.298560] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52ad2f2e-8722-b7b6-1570-22244ebcbc8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.350060] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1467.350280] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1467.350465] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleting the datastore file [datastore2] 9031b0d9-4e07-4afa-a597-770b80df2511 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1467.350737] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4e4e136-217a-4900-a784-ce17a2428d97 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.357525] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1467.357525] env[61473]: value = "task-4281659" [ 1467.357525] env[61473]: _type = "Task" [ 1467.357525] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.365349] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281659, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.801268] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1467.801534] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Creating directory with path [datastore2] vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1467.801769] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97ba9d7b-d922-443f-b4c6-82c4986530ca {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.813129] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Created directory with path [datastore2] vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1467.813255] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Fetch image to [datastore2] vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1467.813429] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1467.814289] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed0cf8a-d981-4d2e-9820-334ba296e266 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.820946] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4db6d4-34b4-47e5-b1d9-8a8d6de51e4b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.829871] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f99fbd-6481-4572-95e8-5b9eaad9a10c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.863582] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c248de-9f29-40be-8cbd-33721e0815b4 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.872249] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e069deaa-f5a5-495d-9ed9-d64a2a2f0435 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.873919] env[61473]: DEBUG oslo_vmware.api [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0777} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.874178] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1467.874361] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1467.874532] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1467.874702] env[61473]: INFO nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Took 0.60 seconds to destroy the instance on the hypervisor. 
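The DeleteDatastoreFile_Task lines above show oslo.vmware's task-polling pattern: submit a vCenter task, then poll task.info until it reaches 'success', or translate the fault and raise on 'error'. A minimal sketch of that pattern (not Nova's verbatim code; 'session' is an oslo_vmware.api.VMwareAPISession and 'dc_ref' a datacenter managed-object reference, both placeholders here):

    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, dc_ref, path):
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=path, datacenter=dc_ref)
        try:
            # Polling emits the "Task: {'id': task-..., ...} progress is N%"
            # DEBUG lines seen above, then returns the completed task info.
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            print('task failed with faults:', e.fault_list)
            raise

    # e.g. delete_datastore_file(session, dc_ref,
    #          '[datastore2] 9031b0d9-4e07-4afa-a597-770b80df2511')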
[ 1467.877063] env[61473]: DEBUG nova.compute.claims [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1467.877237] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.877452] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.895855] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1467.953022] env[61473]: DEBUG oslo_vmware.rw_handles [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1468.011841] env[61473]: DEBUG oslo_vmware.rw_handles [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1468.012053] env[61473]: DEBUG oslo_vmware.rw_handles [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
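The rw_handles entries just above describe the image upload: Nova streams image data to the datastore's HTTPS /folder endpoint. A rough sketch of that transfer, assuming an iterable of byte chunks and a cookie list taken from the live vCenter session (the host, path and size below simply mirror the logged values):

    from oslo_vmware import rw_handles

    def upload_image(chunks, cookies):
        handle = rw_handles.FileWriteHandle(
            'esx7c2n1.openstack.eu-de-1.cloud.sap', 443,
            'ha-datacenter', 'datastore2', cookies,
            'vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/'
            'aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk',
            21318656)
        for chunk in chunks:   # the "image iterator" from the log
            handle.write(chunk)
        handle.close()         # the "Closing write handle" line above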
[ 1468.181134] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfbd495-f680-46e5-b402-6acdca8b667a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.189107] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da747b88-3622-431f-9d11-9a46897a41af {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.218311] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a3de0e-31f2-45f0-8fc8-200b9bcc4d39 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.225421] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b079d40-864f-4372-8e3a-72bc221e4a83 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.239665] env[61473]: DEBUG nova.compute.provider_tree [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1468.249422] env[61473]: DEBUG nova.scheduler.client.report [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1468.263936] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.386s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1468.264496] env[61473]: ERROR nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1468.264496] env[61473]: Faults: ['InvalidArgument']
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Traceback (most recent call last):
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self.driver.spawn(context, instance, image_meta,
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self._fetch_image_if_missing(context, vi)
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] image_cache(vi, tmp_image_ds_loc)
[ 1468.264496] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] vm_util.copy_virtual_disk(
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] session._wait_for_task(vmdk_copy_task)
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] return self.wait_for_task(task_ref)
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] return evt.wait()
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] result = hub.switch()
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] return self.greenlet.switch()
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1468.264854] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] self.f(*self.args, **self.kw)
[ 1468.265217] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1468.265217] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] raise exceptions.translate_fault(task_info.error)
[ 1468.265217] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1468.265217] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Faults: ['InvalidArgument']
[ 1468.265217] env[61473]: ERROR nova.compute.manager [instance: 9031b0d9-4e07-4afa-a597-770b80df2511]
[ 1468.265217] env[61473]: DEBUG nova.compute.utils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1468.266638] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Build of instance 9031b0d9-4e07-4afa-a597-770b80df2511 was re-scheduled: A specified parameter was not correct: fileType [ 1468.266638] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1468.267043] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1468.267226] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}}
[ 1468.267392] env[61473]: DEBUG nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1468.267551] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1468.611908] env[61473]: DEBUG nova.network.neutron [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.622846] env[61473]: INFO nova.compute.manager [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Took 0.36 seconds to deallocate network for instance. [ 1468.710068] env[61473]: INFO nova.scheduler.client.report [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleted allocations for instance 9031b0d9-4e07-4afa-a597-770b80df2511 [ 1468.732612] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d6f560e4-35df-48eb-9523-4357b85e822b tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "9031b0d9-4e07-4afa-a597-770b80df2511" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 582.730s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.734047] env[61473]: DEBUG oslo_concurrency.lockutils [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "9031b0d9-4e07-4afa-a597-770b80df2511" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 386.680s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.734145] env[61473]: DEBUG oslo_concurrency.lockutils [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "9031b0d9-4e07-4afa-a597-770b80df2511-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.734289] env[61473]: DEBUG oslo_concurrency.lockutils [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "9031b0d9-4e07-4afa-a597-770b80df2511-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.734458] env[61473]: 
DEBUG oslo_concurrency.lockutils [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "9031b0d9-4e07-4afa-a597-770b80df2511-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.736467] env[61473]: INFO nova.compute.manager [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Terminating instance [ 1468.738141] env[61473]: DEBUG nova.compute.manager [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1468.738338] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1468.738804] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c68e1a2-eb1b-4ca9-ac65-79cefdb4cda8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.745276] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1468.752594] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300d8f1d-439d-42d2-b77a-cb3d87203aa4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.787534] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9031b0d9-4e07-4afa-a597-770b80df2511 could not be found. [ 1468.787813] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1468.787988] env[61473]: INFO nova.compute.manager [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Took 0.05 seconds to destroy the instance on the hypervisor.
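The WARNING just above shows why this second terminate still succeeds: the driver treats a VM that is already gone on the backend as destroyed, keeping delete idempotent. A condensed sketch of that control flow (the two helpers are hypothetical stand-ins for the driver's lookup and teardown steps):

    from nova import exception

    def destroy(instance):
        try:
            vm_ref = _lookup_vm_ref(instance.uuid)   # hypothetical helper
            _unregister_and_delete(vm_ref)           # hypothetical helper
        except exception.InstanceNotFound:
            # The VM vanished between the API delete and this call; log
            # "Instance does not exist on backend" and report success.
            pass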
[ 1468.788633] env[61473]: DEBUG oslo.service.loopingcall [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.788633] env[61473]: DEBUG nova.compute.manager [-] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1468.788739] env[61473]: DEBUG nova.network.neutron [-] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1468.815824] env[61473]: DEBUG nova.network.neutron [-] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.820459] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.820709] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.822207] env[61473]: INFO nova.compute.claims [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1468.827299] env[61473]: INFO nova.compute.manager [-] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] Took 0.04 seconds to deallocate network for instance. [ 1468.923128] env[61473]: DEBUG oslo_concurrency.lockutils [None req-73ed27b7-dc6c-49c8-b309-08e3530c5b45 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "9031b0d9-4e07-4afa-a597-770b80df2511" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.189s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.924376] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "9031b0d9-4e07-4afa-a597-770b80df2511" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 186.735s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.924640] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 9031b0d9-4e07-4afa-a597-770b80df2511] During sync_power_state the instance has a pending task (deleting). Skip.
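Every "Acquiring lock ... / acquired ... waited Ns / ... released ... held Ns" triple in this log comes from oslo.concurrency logging around a named critical section. A standalone example of the same pattern:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the named in-process lock held; the wrapper logs the
        # waited/held durations exactly like the lockutils lines above.
        pass

    claim_resources()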
[ 1468.924843] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "9031b0d9-4e07-4afa-a597-770b80df2511" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.078539] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d78518-8f95-4e41-acb8-cb6118870376 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.086110] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b653873-9723-46c3-bcc1-38c37fc1fc76 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.115279] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c961c34-509a-4587-8ec1-c0861ccedbd7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.121945] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095c4331-fe53-4778-ade4-51433407b5fe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.134492] env[61473]: DEBUG nova.compute.provider_tree [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1469.147629] env[61473]: DEBUG nova.scheduler.client.report [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1469.162471] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.342s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.162973] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}}
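The inventory dict reported above is what placement uses for capacity checks: usable capacity per resource class is (total - reserved) * allocation_ratio. A quick check against the values this provider reports:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 329.0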
[ 1469.198946] env[61473]: DEBUG nova.compute.utils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1469.200280] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1469.200474] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1469.208960] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1469.266045] env[61473]: DEBUG nova.policy [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '679a463d26e64b3c8b61617fe97abf2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '930bd6995c2a4a6d8b2f760d584e21bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1469.274522] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Start spawning the instance on the hypervisor.
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1469.300232] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=<?>,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T02:07:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1469.300493] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1469.300652] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1469.300871] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1469.301101] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1469.301267] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1469.301479] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1469.301647] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1469.301816] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621
tempest-ImagesTestJSON-1599981621-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1469.301984] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1469.302194] env[61473]: DEBUG nova.virt.hardware [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1469.303035] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51966b5d-27d7-4f3d-b6dc-2698f71f4fdb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.311129] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28f6269-f8ed-4c3c-857d-edf241cdbb87 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.602236] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Successfully created port: 6e70cfec-859c-41a1-91e4-b14d9098e933 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1470.350286] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Successfully updated port: 6e70cfec-859c-41a1-91e4-b14d9098e933 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1470.365547] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "refresh_cache-e00abe75-5243-4ab2-801b-f1d5f023b46b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.365654] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "refresh_cache-e00abe75-5243-4ab2-801b-f1d5f023b46b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.365878] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1470.403781] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1470.586490] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Updating instance_info_cache with network_info: [{"id": "6e70cfec-859c-41a1-91e4-b14d9098e933", "address": "fa:16:3e:6d:52:06", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e70cfec-85", "ovs_interfaceid": "6e70cfec-859c-41a1-91e4-b14d9098e933", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.604015] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "refresh_cache-e00abe75-5243-4ab2-801b-f1d5f023b46b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.604015] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Instance network_info: |[{"id": "6e70cfec-859c-41a1-91e4-b14d9098e933", "address": "fa:16:3e:6d:52:06", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e70cfec-85", "ovs_interfaceid": "6e70cfec-859c-41a1-91e4-b14d9098e933", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1470.604193] env[61473]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:52:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e70cfec-859c-41a1-91e4-b14d9098e933', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1470.609947] env[61473]: DEBUG oslo.service.loopingcall [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1470.610673] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1470.611042] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8259d8b4-7f19-4751-a373-aab275daa355 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.631775] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1470.631775] env[61473]: value = "task-4281660" [ 1470.631775] env[61473]: _type = "Task" [ 1470.631775] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.640074] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281660, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
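The "Instance VIF info" line above is the product of translating one neutron network_info entry into the dict used to build the VM create spec. A condensed sketch of that mapping (an illustration, not the exact Nova function; field names are taken from the structures logged above):

    def vif_info_from_network_info(vif):
        details = vif['details']
        return {
            'network_name': vif['network']['bridge'],   # 'br-int'
            'mac_address': vif['address'],              # 'fa:16:3e:6d:52:06'
            'network_ref': {'type': 'OpaqueNetwork',
                            'network-id': details['nsx-logical-switch-id'],
                            'network-type': 'nsx.LogicalSwitch',
                            'use-external-id': True},
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',   # from the image's hw properties
        }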
[ 1470.649534] env[61473]: DEBUG nova.compute.manager [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Received event network-vif-plugged-6e70cfec-859c-41a1-91e4-b14d9098e933 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1470.649682] env[61473]: DEBUG oslo_concurrency.lockutils [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] Acquiring lock "e00abe75-5243-4ab2-801b-f1d5f023b46b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.649831] env[61473]: DEBUG oslo_concurrency.lockutils [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.650054] env[61473]: DEBUG oslo_concurrency.lockutils [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.650232] env[61473]: DEBUG nova.compute.manager [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] No waiting events found dispatching network-vif-plugged-6e70cfec-859c-41a1-91e4-b14d9098e933 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1470.650396] env[61473]: WARNING nova.compute.manager [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Received unexpected event network-vif-plugged-6e70cfec-859c-41a1-91e4-b14d9098e933 for instance with vm_state building and task_state spawning. [ 1470.650557] env[61473]: DEBUG nova.compute.manager [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Received event network-changed-6e70cfec-859c-41a1-91e4-b14d9098e933 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1470.650751] env[61473]: DEBUG nova.compute.manager [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Refreshing instance network info cache due to event network-changed-6e70cfec-859c-41a1-91e4-b14d9098e933.
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1470.650994] env[61473]: DEBUG oslo_concurrency.lockutils [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] Acquiring lock "refresh_cache-e00abe75-5243-4ab2-801b-f1d5f023b46b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.651163] env[61473]: DEBUG oslo_concurrency.lockutils [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] Acquired lock "refresh_cache-e00abe75-5243-4ab2-801b-f1d5f023b46b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.651327] env[61473]: DEBUG nova.network.neutron [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Refreshing network info cache for port 6e70cfec-859c-41a1-91e4-b14d9098e933 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1470.928839] env[61473]: DEBUG nova.network.neutron [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Updated VIF entry in instance network info cache for port 6e70cfec-859c-41a1-91e4-b14d9098e933. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1470.929173] env[61473]: DEBUG nova.network.neutron [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Updating instance_info_cache with network_info: [{"id": "6e70cfec-859c-41a1-91e4-b14d9098e933", "address": "fa:16:3e:6d:52:06", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e70cfec-85", "ovs_interfaceid": "6e70cfec-859c-41a1-91e4-b14d9098e933", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.939557] env[61473]: DEBUG oslo_concurrency.lockutils [req-dee260f9-3db7-4c18-a56d-35cb21881c6e req-ac07f8db-bc5a-48e1-9045-7c672b6fa2e4 service nova] Releasing lock "refresh_cache-e00abe75-5243-4ab2-801b-f1d5f023b46b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.142826] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281660, 'name': CreateVM_Task, 'duration_secs': 0.285914} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.142826] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1471.143018] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.143187] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.143495] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1471.143732] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d01baa3f-b553-4512-a9cb-409f6a761ad4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.148229] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1471.148229] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]525bc51b-b860-d457-ad71-f1f38721baef" [ 1471.148229] env[61473]: _type = "Task" [ 1471.148229] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.155596] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]525bc51b-b860-d457-ad71-f1f38721baef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.659534] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.659863] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1471.659996] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.966524] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.966694] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1472.966427] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.966824] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.983390] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.983559] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.983763] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.983944] env[61473]: DEBUG nova.compute.resource_tracker [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1472.985085] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179cb591-f496-46ed-9512-53442f5d8546 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.994272] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0a5445-0ce5-427a-a0ce-3a001e71e7af {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.009564] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8f9d12-56ea-4193-a9ea-d220876f6520 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.015786] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0db513-4806-48de-8581-63a8fccd1cbc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.044996] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180650MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1473.045158] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.045345] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.195777] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196034] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196222] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196362] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196484] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196601] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196751] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196862] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.196961] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.197090] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.218214] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance eb01e325-e0f5-4eee-8e3c-22d7389589a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.228612] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 83f00fd0-b61d-42c5-9232-a26da89f7b18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.238750] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8d8a25ed-cec9-4736-be45-0d41b62028ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.249363] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.260297] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ca3c10ac-b3cf-4291-b070-42332b304686 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.269439] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.280737] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.280737] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1473.280737] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1473.484462] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b1eedf-7ee0-47e4-b7f6-62e5b1d36368 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.492439] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521fea4d-8c16-4b42-85df-ea4f7bdc5555 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.524159] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dc8de6-f227-43d4-842e-13ef18b51b8a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.531494] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67a6a74-55a7-4ee6-8238-2a163045ea3b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.546340] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.556301] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.580215] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1473.580423] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.535s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.580596] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.580878] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1474.580913] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1474.601521] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.601687] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.601821] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.601952] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.602243] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.602410] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.602555] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.602653] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.602791] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.602907] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1474.603039] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1474.966129] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.966375] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.966537] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.965497] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.975064] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.961951] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1481.577035] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.577035] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.961357]
env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.484955] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f788906-a7c3-469a-a939-a4444699c6b5 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "84f08516-611c-4455-950b-b332d854e939" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.485276] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f788906-a7c3-469a-a939-a4444699c6b5 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "84f08516-611c-4455-950b-b332d854e939" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.555330] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f634feab-d268-4279-b9fb-c6608ec6a180 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Acquiring lock "605fba76-9c25-4a0d-8e4e-0c7672c3a841" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.555758] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f634feab-d268-4279-b9fb-c6608ec6a180 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "605fba76-9c25-4a0d-8e4e-0c7672c3a841" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.761066] env[61473]: WARNING oslo_vmware.rw_handles [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles
http.client.RemoteDisconnected: Remote end closed connection without response [ 1516.761066] env[61473]: ERROR oslo_vmware.rw_handles [ 1516.761862] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1516.763482] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1516.763720] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Copying Virtual Disk [datastore2] vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/cb26343e-6153-4dc0-b13e-8b8ad33cd67a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1516.764009] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd6d8dec-a0d0-4d89-8451-dda6a5c3d759 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.772502] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Waiting for the task: (returnval){ [ 1516.772502] env[61473]: value = "task-4281661" [ 1516.772502] env[61473]: _type = "Task" [ 1516.772502] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.780836] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Task: {'id': task-4281661, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.282649] env[61473]: DEBUG oslo_vmware.exceptions [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1517.282902] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.283478] env[61473]: ERROR nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1517.283478] env[61473]: Faults: ['InvalidArgument'] [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Traceback (most recent call last): [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] yield resources [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self.driver.spawn(context, instance, image_meta, [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._fetch_image_if_missing(context, vi) [ 1517.283478] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] image_cache(vi, tmp_image_ds_loc) [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] vm_util.copy_virtual_disk( [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] session._wait_for_task(vmdk_copy_task) [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.wait_for_task(task_ref) [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return evt.wait() [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] result = hub.switch() [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1517.283805] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.greenlet.switch() [ 1517.284153] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1517.284153] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self.f(*self.args, **self.kw) [ 1517.284153] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1517.284153] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] raise exceptions.translate_fault(task_info.error) [ 1517.284153] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1517.284153] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Faults: ['InvalidArgument'] [ 1517.284153] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] [ 1517.284153] env[61473]: INFO nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Terminating instance [ 1517.285347] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.285555] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1517.286060] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock 
"refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.286216] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquired lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.286379] env[61473]: DEBUG nova.network.neutron [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1517.287330] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92034d32-8462-4eb7-a3be-990814c97937 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.296796] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1517.296969] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1517.297940] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00107acf-26c7-4d06-b1d9-c594960ecab3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.302930] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Waiting for the task: (returnval){ [ 1517.302930] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]527c24f8-2376-57ed-4f64-fc35f76bb51f" [ 1517.302930] env[61473]: _type = "Task" [ 1517.302930] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.310265] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]527c24f8-2376-57ed-4f64-fc35f76bb51f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.314557] env[61473]: DEBUG nova.network.neutron [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1517.376699] env[61473]: DEBUG nova.network.neutron [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.385861] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Releasing lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.386283] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1517.386506] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1517.387546] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ff00da-9de7-4915-a96f-c0606eec1c58 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.395138] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1517.395352] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f407094-a30e-421f-ba4f-3442bdc1af28 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.427544] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1517.427739] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1517.427921] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Deleting the datastore file [datastore2] 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1517.428165] env[61473]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7070314-6acb-48fe-8233-fa5bd3dfce36 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.437092] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Waiting for the task: (returnval){ [ 1517.437092] env[61473]: value = "task-4281663" [ 1517.437092] env[61473]: _type = "Task" [ 1517.437092] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.444483] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Task: {'id': task-4281663, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.813966] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1517.814327] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Creating directory with path [datastore2] vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1517.814452] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b8594a5-95a8-4848-97cd-309088205451 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.825293] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Created directory with path [datastore2] vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1517.825467] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Fetch image to [datastore2] vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1517.825632] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1517.826348] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d928828-1790-4fc8-a6c2-135530936ef2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.832638] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36fd390-74cf-4dd9-a27b-740adf236bc1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.841123] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11267196-039e-4192-96b4-ab87a5eba16e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.870724] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93026a7b-3b28-4546-b109-a94bda952b4e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.875717] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7e4059f7-e905-405d-9da7-e7fdc2dc65cc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.897052] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1517.945937] env[61473]: DEBUG oslo_vmware.api [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Task: {'id': task-4281663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044762} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.946215] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1517.947027] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1517.947027] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1517.947027] env[61473]: INFO nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1517.947027] env[61473]: DEBUG oslo.service.loopingcall [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.947942] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1517.949249] env[61473]: DEBUG nova.compute.manager [-] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network deallocation for instance since networking was not requested.
{{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1517.951710] env[61473]: DEBUG nova.compute.claims [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1517.951883] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.952112] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.010601] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1518.010808] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1518.225854] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5afd722-feb1-42e1-90ed-8e27b817cb6d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.233329] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d3f357-07e7-460a-99d2-fdecc3855a4a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.262881] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7facae7e-05ed-4cc5-8b43-3bdbf2b4310d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.270034] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf82c532-497f-490a-b565-112403e91b18 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.283047] env[61473]: DEBUG nova.compute.provider_tree [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.291809] env[61473]: DEBUG nova.scheduler.client.report [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1518.305968] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.354s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.306492] env[61473]: ERROR nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1518.306492] env[61473]: Faults: ['InvalidArgument'] [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Traceback (most recent call last): [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 
4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self.driver.spawn(context, instance, image_meta, [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._fetch_image_if_missing(context, vi) [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] image_cache(vi, tmp_image_ds_loc) [ 1518.306492] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] vm_util.copy_virtual_disk( [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] session._wait_for_task(vmdk_copy_task) [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.wait_for_task(task_ref) [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return evt.wait() [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] result = hub.switch() [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.greenlet.switch() [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1518.306878] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self.f(*self.args, **self.kw) [ 1518.307196] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1518.307196] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] raise exceptions.translate_fault(task_info.error) [ 1518.307196] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1518.307196] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Faults: ['InvalidArgument'] [ 1518.307196] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] [ 1518.307196] env[61473]: DEBUG nova.compute.utils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1518.308865] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Build of instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 was re-scheduled: A specified parameter was not correct: fileType [ 1518.308865] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1518.309266] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1518.309485] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.309634] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquired lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.309797] env[61473]: DEBUG nova.network.neutron [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1518.334078] env[61473]: DEBUG nova.network.neutron [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1518.393830] env[61473]: DEBUG nova.network.neutron [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.404129] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Releasing lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.404501] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1518.404772] env[61473]: DEBUG nova.compute.manager [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Skipping network deallocation for instance since networking was not requested. {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1518.489088] env[61473]: INFO nova.scheduler.client.report [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Deleted allocations for instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 [ 1518.507625] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f39a79e-9f50-438f-a1fd-e5cfa823ce8d tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 614.931s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.508785] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 415.684s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.509039] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.509265] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.509432] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.511360] env[61473]: INFO nova.compute.manager [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Terminating instance [ 1518.512844] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquiring lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.513015] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Acquired lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.513187] env[61473]: DEBUG nova.network.neutron [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1518.521407] env[61473]: DEBUG nova.compute.manager [None req-6c7638a3-7f3b-44d2-a28a-f554ac3eda0e tempest-ServerAddressesTestJSON-137553904 tempest-ServerAddressesTestJSON-137553904-project-member] [instance: eb01e325-e0f5-4eee-8e3c-22d7389589a0] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1518.541220] env[61473]: DEBUG nova.network.neutron [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1518.551031] env[61473]: DEBUG nova.compute.manager [None req-6c7638a3-7f3b-44d2-a28a-f554ac3eda0e tempest-ServerAddressesTestJSON-137553904 tempest-ServerAddressesTestJSON-137553904-project-member] [instance: eb01e325-e0f5-4eee-8e3c-22d7389589a0] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1518.570841] env[61473]: DEBUG oslo_concurrency.lockutils [None req-6c7638a3-7f3b-44d2-a28a-f554ac3eda0e tempest-ServerAddressesTestJSON-137553904 tempest-ServerAddressesTestJSON-137553904-project-member] Lock "eb01e325-e0f5-4eee-8e3c-22d7389589a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.636s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.580388] env[61473]: DEBUG nova.compute.manager [None req-83256982-64c1-46a1-a406-b1f47bc9c096 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: 83f00fd0-b61d-42c5-9232-a26da89f7b18] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1518.602357] env[61473]: DEBUG nova.network.neutron [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.604035] env[61473]: DEBUG nova.compute.manager [None req-83256982-64c1-46a1-a406-b1f47bc9c096 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: 83f00fd0-b61d-42c5-9232-a26da89f7b18] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1518.610875] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Releasing lock "refresh_cache-4e0f0570-961b-4be0-aca9-b1115a2ab6b6" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.611327] env[61473]: DEBUG nova.compute.manager [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1518.611515] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1518.612017] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-009685ab-8812-498e-954c-58a91257d020 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.624127] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7ff2db-a3d5-452a-aa9a-f4e5e1f87730 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.634867] env[61473]: DEBUG oslo_concurrency.lockutils [None req-83256982-64c1-46a1-a406-b1f47bc9c096 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "83f00fd0-b61d-42c5-9232-a26da89f7b18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.160s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.643933] env[61473]: DEBUG nova.compute.manager [None req-851d99cd-e08a-4055-a7a7-995b4b8ce503 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 8d8a25ed-cec9-4736-be45-0d41b62028ad] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1518.655318] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4e0f0570-961b-4be0-aca9-b1115a2ab6b6 could not be found. [ 1518.655318] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1518.655461] env[61473]: INFO nova.compute.manager [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1518.655653] env[61473]: DEBUG oslo.service.loopingcall [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1518.656611] env[61473]: DEBUG nova.compute.manager [-] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1518.656611] env[61473]: DEBUG nova.network.neutron [-] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1518.667571] env[61473]: DEBUG nova.compute.manager [None req-851d99cd-e08a-4055-a7a7-995b4b8ce503 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 8d8a25ed-cec9-4736-be45-0d41b62028ad] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1518.688163] env[61473]: DEBUG oslo_concurrency.lockutils [None req-851d99cd-e08a-4055-a7a7-995b4b8ce503 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "8d8a25ed-cec9-4736-be45-0d41b62028ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.822s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.698347] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1518.749525] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.749792] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.751315] env[61473]: INFO nova.compute.claims [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1518.833840] env[61473]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61473) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1518.834093] env[61473]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The 
request you have made requires authentication.'}} [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-8b87441c-9e12-4fc6-ad04-2e539b72935b'] [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1518.834677] env[61473]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1518.835108] env[61473]: ERROR 
oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1518.835108] env[61473]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1518.835628] env[61473]: ERROR oslo.service.loopingcall [ 1518.836117] env[61473]: ERROR nova.compute.manager [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
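The traceback above shows the mechanism behind this failure: calls on Nova's Neutron client pass through a proxy wrapper (nova/network/neutron.py, lines 196 and 212 in this build) that re-raises neutronclient's Unauthorized as NeutronAdminCredentialConfigurationInvalid whenever the client was built from the [neutron] service credentials in nova.conf, on the reasoning that a 401 on admin credentials can only be an operator configuration problem. The preceding "Dynamic interval looping call ... failed" record also shows why there is no retry: _deallocate_network_with_retries runs under oslo_service.loopingcall's RetryDecorator, which presumably retries only transient connection failures, so the translated credential error propagates on the first attempt. A minimal, self-contained sketch of that translation pattern follows; the exception classes are simplified stand-ins, not the real nova.exception / neutronclient types:

    # Sketch of the 401-translation wrapper visible in the traceback above.
    # The real code lives in nova/network/neutron.py (the client proxy);
    # everything here is an illustrative stand-in, not Nova's actual API.
    import functools


    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""


    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class raised in the log."""


    def translate_unauthorized(func):
        """Re-raise a 401 from the wrapped client call as a config error.

        In Nova this translation only happens when the client holds the
        admin credentials from nova.conf: an admin-token 401 cannot be the
        end user's fault, so retrying will not help.
        """
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper


    @translate_unauthorized
    def list_ports(**search_opts):
        # Stand-in for neutronclient's list_ports(); simulate the 401
        # that Neutron returned in the records above.
        raise Unauthorized("The request you have made requires authentication.")


    try:
        list_ports(device_id="4e0f0570-961b-4be0-aca9-b1115a2ab6b6")
    except NeutronAdminCredentialConfigurationInvalid as exc:
        print(type(exc).__name__)

Run directly, the sketch prints NeutronAdminCredentialConfigurationInvalid, mirroring what _deallocate_network ultimately surfaces in the records that follow.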
[ 1518.868762] env[61473]: ERROR nova.compute.manager [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Traceback (most recent call last): [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] ret = obj(*args, **kwargs) [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] exception_handler_v20(status_code, error_body) [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] raise client_exc(message=error_message, [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Neutron server returns request_ids: ['req-8b87441c-9e12-4fc6-ad04-2e539b72935b'] [ 1518.868762] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] During handling of the above exception, another exception occurred: [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Traceback (most recent call last): [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._delete_instance(context, instance, bdms) [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._shutdown_instance(context, instance, bdms) [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._try_deallocate_network(context, instance, 
requested_networks) [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] with excutils.save_and_reraise_exception(): [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1518.869161] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self.force_reraise() [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] raise self.value [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] _deallocate_network_with_retries() [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return evt.wait() [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] result = hub.switch() [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.greenlet.switch() [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1518.869530] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] result = func(*self.args, **self.kw) [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] result = f(*args, **kwargs) [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._deallocate_network( [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1518.869867] 
env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self.network_api.deallocate_for_instance( [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] data = neutron.list_ports(**search_opts) [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] ret = obj(*args, **kwargs) [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.list('ports', self.ports_path, retrieve_all, [ 1518.869867] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] ret = obj(*args, **kwargs) [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] for r in self._pagination(collection, path, **params): [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] res = self.get(path, params=params) [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] ret = obj(*args, **kwargs) [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.retry_request("GET", action, body=body, [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] ret = obj(*args, **kwargs) [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1518.870247] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] return self.do_request(method, action, body=body, [ 1518.870607] env[61473]: ERROR nova.compute.manager 
[instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.870607] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] ret = obj(*args, **kwargs) [ 1518.870607] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1518.870607] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] self._handle_fault_response(status_code, replybody, resp) [ 1518.870607] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1518.870607] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1518.870607] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1518.870607] env[61473]: ERROR nova.compute.manager [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] [ 1518.895416] env[61473]: DEBUG oslo_concurrency.lockutils [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.387s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.898641] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 236.709s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.898860] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] During sync_power_state the instance has a pending task (deleting). Skip. [ 1518.899052] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "4e0f0570-961b-4be0-aca9-b1115a2ab6b6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.945483] env[61473]: INFO nova.compute.manager [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] [instance: 4e0f0570-961b-4be0-aca9-b1115a2ab6b6] Successfully reverted task state from None on failure for instance. [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server [None req-43848e86-07b5-4ed5-bc42-f1a85d8efc5b tempest-ServerShowV247Test-948988317 tempest-ServerShowV247Test-948988317-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-8b87441c-9e12-4fc6-ad04-2e539b72935b'] [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1518.948835] env[61473]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1518.949427] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3349, in terminate_instance [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in do_terminate_instance [ 1518.949938] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1518.950444] env[61473]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1518.950940] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.950940] env[61473]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1518.951556] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1518.952112] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1518.952112] env[61473]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1518.952112] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1518.952112] env[61473]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1518.952112] env[61473]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1518.952112] env[61473]: ERROR oslo_messaging.rpc.server [ 1519.009211] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6223f8a5-ac30-495a-b815-2bda851e2b42 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.016657] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb12fb20-2eef-458d-9fec-35c8e85f4bf5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.047316] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051d9e23-dd24-4d5b-adc0-3419ef86fa63 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.054127] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925a923d-b9b0-4c97-ae26-305ce1ef6162 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.066945] env[61473]: DEBUG nova.compute.provider_tree [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1519.076868] env[61473]: DEBUG nova.scheduler.client.report [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1519.091290] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.341s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.091739] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1519.133300] env[61473]: DEBUG nova.compute.utils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1519.134555] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1519.134729] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1519.146669] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1519.217212] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1519.219785] env[61473]: DEBUG nova.policy [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd79499f5069245d89c6cd1c51e1b754d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3672e7acb99b4704be2776c46cb348ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1519.243167] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1519.243167] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1519.243167] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1519.243375] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1519.243375] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1519.243375] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1519.243816] env[61473]: 
DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1519.244160] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1519.244493] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1519.247086] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1519.247086] env[61473]: DEBUG nova.virt.hardware [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1519.247086] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ac566d-cfe4-432c-9484-4835530f8aa1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.254828] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23a8575-4f32-47d3-a0f5-cfc446c46163 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.588200] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Successfully created port: 181dd1a9-6866-42b3-b953-d045bda7cbe7 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1520.260717] env[61473]: DEBUG nova.compute.manager [req-4f06675b-4093-4b54-a8b0-7fd41040f393 req-a9d0cafa-5a75-4a20-b2b8-b9c239d0e36c service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Received event network-vif-plugged-181dd1a9-6866-42b3-b953-d045bda7cbe7 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1520.260989] env[61473]: DEBUG oslo_concurrency.lockutils [req-4f06675b-4093-4b54-a8b0-7fd41040f393 req-a9d0cafa-5a75-4a20-b2b8-b9c239d0e36c service nova] Acquiring lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.261276] env[61473]: DEBUG oslo_concurrency.lockutils [req-4f06675b-4093-4b54-a8b0-7fd41040f393 
req-a9d0cafa-5a75-4a20-b2b8-b9c239d0e36c service nova] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.261336] env[61473]: DEBUG oslo_concurrency.lockutils [req-4f06675b-4093-4b54-a8b0-7fd41040f393 req-a9d0cafa-5a75-4a20-b2b8-b9c239d0e36c service nova] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.261469] env[61473]: DEBUG nova.compute.manager [req-4f06675b-4093-4b54-a8b0-7fd41040f393 req-a9d0cafa-5a75-4a20-b2b8-b9c239d0e36c service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] No waiting events found dispatching network-vif-plugged-181dd1a9-6866-42b3-b953-d045bda7cbe7 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1520.261633] env[61473]: WARNING nova.compute.manager [req-4f06675b-4093-4b54-a8b0-7fd41040f393 req-a9d0cafa-5a75-4a20-b2b8-b9c239d0e36c service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Received unexpected event network-vif-plugged-181dd1a9-6866-42b3-b953-d045bda7cbe7 for instance with vm_state building and task_state spawning. [ 1520.407567] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Successfully updated port: 181dd1a9-6866-42b3-b953-d045bda7cbe7 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.418049] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "refresh_cache-64dc3dee-8479-478b-87c8-2bb0ae0f99d4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.418206] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired lock "refresh_cache-64dc3dee-8479-478b-87c8-2bb0ae0f99d4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.418362] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1520.468199] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1520.666117] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Updating instance_info_cache with network_info: [{"id": "181dd1a9-6866-42b3-b953-d045bda7cbe7", "address": "fa:16:3e:de:7a:04", "network": {"id": "00f5e011-93c2-4626-9f52-92ba3a3c2c1e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-717398445-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3672e7acb99b4704be2776c46cb348ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181dd1a9-68", "ovs_interfaceid": "181dd1a9-6866-42b3-b953-d045bda7cbe7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.678979] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Releasing lock "refresh_cache-64dc3dee-8479-478b-87c8-2bb0ae0f99d4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.679265] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Instance network_info: |[{"id": "181dd1a9-6866-42b3-b953-d045bda7cbe7", "address": "fa:16:3e:de:7a:04", "network": {"id": "00f5e011-93c2-4626-9f52-92ba3a3c2c1e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-717398445-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3672e7acb99b4704be2776c46cb348ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181dd1a9-68", "ovs_interfaceid": "181dd1a9-6866-42b3-b953-d045bda7cbe7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 1520.679655] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:7a:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7514a465-f1a4-4a8b-b76b-726b1a9d7e2f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '181dd1a9-6866-42b3-b953-d045bda7cbe7', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1520.687344] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating folder: Project (3672e7acb99b4704be2776c46cb348ec). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1520.687866] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-246abdf1-1c17-42df-b678-c1690bc1b1e8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.700138] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Created folder: Project (3672e7acb99b4704be2776c46cb348ec) in parent group-v843485. [ 1520.700385] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating folder: Instances. Parent ref: group-v843567. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1520.700646] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c3d9a52-8ee5-44de-ad08-11f804cb17e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.710220] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Created folder: Instances in parent group-v843567. [ 1520.710438] env[61473]: DEBUG oslo.service.loopingcall [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1520.710621] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1520.710799] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85c0ab3e-a0cc-44ad-a6e9-a6fc843d5638 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.730441] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1520.730441] env[61473]: value = "task-4281666" [ 1520.730441] env[61473]: _type = "Task" [ 1520.730441] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.737834] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281666, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.241043] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281666, 'name': CreateVM_Task, 'duration_secs': 0.286133} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.241172] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1521.241837] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.242010] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.242345] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1521.242581] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44b0843c-910a-41af-94ca-3106451e0218 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.247044] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 1521.247044] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5262fee7-11a1-060b-f755-43ccfa660a39" [ 1521.247044] env[61473]: _type = "Task" [ 1521.247044] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.254426] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5262fee7-11a1-060b-f755-43ccfa660a39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.757476] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.757832] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1521.757988] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.286110] env[61473]: DEBUG nova.compute.manager [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Received event network-changed-181dd1a9-6866-42b3-b953-d045bda7cbe7 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1522.286303] env[61473]: DEBUG nova.compute.manager [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Refreshing instance network info cache due to event network-changed-181dd1a9-6866-42b3-b953-d045bda7cbe7. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1522.286531] env[61473]: DEBUG oslo_concurrency.lockutils [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] Acquiring lock "refresh_cache-64dc3dee-8479-478b-87c8-2bb0ae0f99d4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.286718] env[61473]: DEBUG oslo_concurrency.lockutils [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] Acquired lock "refresh_cache-64dc3dee-8479-478b-87c8-2bb0ae0f99d4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.286887] env[61473]: DEBUG nova.network.neutron [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Refreshing network info cache for port 181dd1a9-6866-42b3-b953-d045bda7cbe7 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1522.533737] env[61473]: DEBUG nova.network.neutron [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Updated VIF entry in instance network info cache for port 181dd1a9-6866-42b3-b953-d045bda7cbe7. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1522.534122] env[61473]: DEBUG nova.network.neutron [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Updating instance_info_cache with network_info: [{"id": "181dd1a9-6866-42b3-b953-d045bda7cbe7", "address": "fa:16:3e:de:7a:04", "network": {"id": "00f5e011-93c2-4626-9f52-92ba3a3c2c1e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-717398445-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3672e7acb99b4704be2776c46cb348ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181dd1a9-68", "ovs_interfaceid": "181dd1a9-6866-42b3-b953-d045bda7cbe7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.543347] env[61473]: DEBUG oslo_concurrency.lockutils [req-6e101a84-a1eb-4e27-9540-ce4f212d53af req-7c92be9f-83d3-498d-9c64-4f0a6ea544fe service nova] Releasing lock "refresh_cache-64dc3dee-8479-478b-87c8-2bb0ae0f99d4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.966501] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61473) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1527.527065] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.973674] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.966615] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.966802] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1534.967386] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.967386] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1534.967386] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1534.990120] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.990278] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.990411] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.990559] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.990680] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.990802] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.990923] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.991053] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.991175] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.991294] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1534.991412] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1534.991900] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.992097] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.005960] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.006185] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.006353] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.006507] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1535.007609] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f34d2bd-6bf0-414c-9792-31e6efc80a80 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.016383] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132a6637-78a6-48e7-9e7e-e9e60f1bbf36 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.030415] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f3c3f8-db8e-41ec-b33a-57dcc8d0fd88 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.036950] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7e8180-c52b-424e-85f9-30647fa7ed3d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.065575] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180634MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1535.065740] env[61473]: DEBUG 
oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.065939] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.182423] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a12b01db-28b4-477d-aef2-99304505d8c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.182590] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.182722] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.182844] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.182963] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.183099] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.183218] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.183331] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.183444] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.183555] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1535.195484] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ca3c10ac-b3cf-4291-b070-42332b304686 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1535.208855] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1535.220301] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1535.229864] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1535.239793] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 84f08516-611c-4455-950b-b332d854e939 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1535.252971] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 605fba76-9c25-4a0d-8e4e-0c7672c3a841 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1535.253223] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1535.253366] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1535.269252] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing inventories for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1535.284897] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating ProviderTree inventory for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1535.285097] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1535.295730] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing 
aggregate associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, aggregates: None {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1535.312999] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing trait associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1535.483105] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1bbfde-c4ce-4985-8960-b94e6921626d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.490625] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c557b8-95c5-4cf4-9b45-49fa5cf8c6e4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.520979] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f6d41d-5ff4-4e4c-8ea8-e022daad43b2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.528143] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7ed184-84dd-40a9-9ad3-a28ba0e31358 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.540969] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1535.549355] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1535.563204] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1535.563381] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.497s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.537749] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.538566] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.966676] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.962594] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.966795] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.967109] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11255}} [ 1538.982323] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] There are 0 instances to clean {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11264}} [ 1538.982604] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.982785] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances with incomplete migration {{(pid=61473) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11293}} [ 1541.520897] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "19154895-863b-4468-8737-32105f98528b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.521319] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "19154895-863b-4468-8737-32105f98528b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.568703] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "7c9630f0-b868-4029-a841-4569d984fc5e" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.569034] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "7c9630f0-b868-4029-a841-4569d984fc5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.597469] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "16e08841-5bb0-4d57-800c-ef036946acf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.598683] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "16e08841-5bb0-4d57-800c-ef036946acf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.780088] env[61473]: WARNING oslo_vmware.rw_handles [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1566.780088] env[61473]: ERROR oslo_vmware.rw_handles [ 1566.780833] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to 
vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1566.782596] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1566.782854] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Copying Virtual Disk [datastore2] vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/e8fd07db-3fc1-4731-a024-145033b0e167/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1566.783174] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45130c20-80ea-4453-9ef8-838d95c07642 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.791315] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Waiting for the task: (returnval){ [ 1566.791315] env[61473]: value = "task-4281667" [ 1566.791315] env[61473]: _type = "Task" [ 1566.791315] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.798806] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Task: {'id': task-4281667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.301560] env[61473]: DEBUG oslo_vmware.exceptions [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1567.301905] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.302478] env[61473]: ERROR nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.302478] env[61473]: Faults: ['InvalidArgument'] [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Traceback (most recent call last): [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] yield resources [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self.driver.spawn(context, instance, image_meta, [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self._fetch_image_if_missing(context, vi) [ 1567.302478] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] image_cache(vi, tmp_image_ds_loc) [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] vm_util.copy_virtual_disk( [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] session._wait_for_task(vmdk_copy_task) [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] return self.wait_for_task(task_ref) [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] return evt.wait() [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] result = hub.switch() [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1567.302901] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] return self.greenlet.switch() [ 1567.303328] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1567.303328] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self.f(*self.args, **self.kw) [ 1567.303328] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1567.303328] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] raise exceptions.translate_fault(task_info.error) [ 1567.303328] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1567.303328] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Faults: ['InvalidArgument'] [ 1567.303328] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] [ 1567.303328] env[61473]: INFO nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Terminating instance [ 1567.304437] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.304682] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.304943] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff5f7165-316d-4c2e-a4b7-06b2c8889bc5 {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.307351] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1567.307620] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1567.308364] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90615457-fa61-42aa-823d-0e81a942101e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.315124] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1567.315364] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5b5059a-7c08-4d62-8604-c3c6cc01c0b2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.317586] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.317764] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1567.318729] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9779a4d-386c-4eaa-9f62-963d712195ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.323801] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){ [ 1567.323801] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]522fdb3a-1801-705b-7690-defdf7925887" [ 1567.323801] env[61473]: _type = "Task" [ 1567.323801] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.331060] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]522fdb3a-1801-705b-7690-defdf7925887, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.394999] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1567.395253] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1567.395457] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Deleting the datastore file [datastore2] a12b01db-28b4-477d-aef2-99304505d8c9 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1567.395742] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa5df8b3-f6dd-4eb7-8501-30e337623f63 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.402412] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Waiting for the task: (returnval){ [ 1567.402412] env[61473]: value = "task-4281669" [ 1567.402412] env[61473]: _type = "Task" [ 1567.402412] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.410757] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Task: {'id': task-4281669, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.834263] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1567.834661] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating directory with path [datastore2] vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.834806] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-597aed63-b9dc-4a04-b34d-93e597e9271f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.846459] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Created directory with path [datastore2] vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.846658] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Fetch image to [datastore2] vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1567.846830] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1567.847695] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43745951-da23-46e5-b3af-87191b208aaf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.854457] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a1ea80-f922-4cb5-a35c-3f8f6df77261 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.863542] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a628defa-729c-4b86-b8d5-761a33ba4e62 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.895833] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6111fe-40ca-46ab-816c-beba118ac302 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.901649] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-40171ab1-991c-4117-87fa-96af355fa034 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.911169] env[61473]: DEBUG oslo_vmware.api [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Task: {'id': task-4281669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071856} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.911409] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1567.911595] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1567.911769] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1567.911938] env[61473]: INFO nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Took 0.60 seconds to destroy the instance on the hypervisor. 
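
The DeleteDatastoreFile_Task records above follow the standard oslo_vmware pattern: the vSphere call returns a task moref (the "task-4281667"/"task-4281669" values), which is then polled until it reports completion with a duration_secs. A minimal sketch of that call path, assuming an already-established oslo_vmware.api.VMwareAPISession named session and a datacenter moref dc_ref; the helper name and the datastore path are illustrative, not Nova's exact code:

    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, dc_ref, ds_path):
        # FileManager.DeleteDatastoreFile_Task returns a task moref,
        # like the "task-4281669" value logged above.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_manager,
            name=ds_path,  # e.g. '[datastore2] a12b01db-28b4-477d-aef2-99304505d8c9'
            datacenter=dc_ref)
        try:
            # wait_for_task drives the "progress is 0%" polling lines and
            # returns the task info once the task completes successfully.
            return session.wait_for_task(task)
        except vexc.FileNotFoundException:
            # An already-missing file can reasonably be treated as success.
            pass
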
[ 1567.914103] env[61473]: DEBUG nova.compute.claims [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1567.914279] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.914491] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.930266] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1567.991920] env[61473]: DEBUG oslo_vmware.rw_handles [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1568.057615] env[61473]: DEBUG oslo_vmware.rw_handles [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1568.057847] env[61473]: DEBUG oslo_vmware.rw_handles [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1568.284740] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a381c6-86bf-4a7e-b820-9bed7b276626 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.292990] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5345116-5c30-4379-b08a-51750a0e8120 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.323039] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10063fdd-5075-4465-8314-194dc255bc97 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.330540] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba17b60a-2b83-4d2b-99e2-192b7f5f0531 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.345711] env[61473]: DEBUG nova.compute.provider_tree [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.355056] env[61473]: DEBUG nova.scheduler.client.report [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1568.374318] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.460s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.374920] env[61473]: ERROR nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1568.374920] env[61473]: Faults: ['InvalidArgument'] [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Traceback (most recent call last): [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1568.374920] 
env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self.driver.spawn(context, instance, image_meta, [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self._fetch_image_if_missing(context, vi) [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] image_cache(vi, tmp_image_ds_loc) [ 1568.374920] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] vm_util.copy_virtual_disk( [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] session._wait_for_task(vmdk_copy_task) [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] return self.wait_for_task(task_ref) [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] return evt.wait() [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] result = hub.switch() [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] return self.greenlet.switch() [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1568.375472] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] self.f(*self.args, **self.kw) [ 1568.375849] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1568.375849] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] raise exceptions.translate_fault(task_info.error) [ 1568.375849] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1568.375849] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Faults: ['InvalidArgument'] [ 1568.375849] env[61473]: ERROR nova.compute.manager [instance: a12b01db-28b4-477d-aef2-99304505d8c9] [ 1568.375849] env[61473]: DEBUG nova.compute.utils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1568.377335] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Build of instance a12b01db-28b4-477d-aef2-99304505d8c9 was re-scheduled: A specified parameter was not correct: fileType [ 1568.377335] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1568.377753] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1568.377922] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1568.378107] env[61473]: DEBUG nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1568.378275] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1568.783025] env[61473]: DEBUG oslo_concurrency.lockutils [None req-81597bd6-9f5a-4cc3-bc86-bb529ec2c831 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "d5b8290a-2dd2-4a49-ba0d-1b88a5940833" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.783339] env[61473]: DEBUG oslo_concurrency.lockutils [None req-81597bd6-9f5a-4cc3-bc86-bb529ec2c831 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "d5b8290a-2dd2-4a49-ba0d-1b88a5940833" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.965612] env[61473]: DEBUG nova.network.neutron [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.980519] env[61473]: INFO nova.compute.manager [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Took 0.60 seconds to deallocate network for instance. 
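
The re-schedule above is the direct consequence of the VimFaultException raised while waiting on the copy task: _poll_task sees the task in an error state and raises the translated fault, which propagates out of spawn() into _do_build_and_run_instance. A compressed sketch of that error path, assuming placeholder session and task values (VimFaultException and its fault_list attribute belong to oslo_vmware.exceptions; the wrapper function itself is hypothetical):

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, copy_task):
        try:
            # Fails exactly where the traceback shows: the poll loop raises
            # the fault translated from task_info.error.
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as e:
            # e.fault_list mirrors the "Faults: ['InvalidArgument']" lines;
            # the message is "A specified parameter was not correct: fileType".
            # Letting it propagate is what triggers the claim abort and the
            # re-schedule recorded in the log.
            raise
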
[ 1569.077902] env[61473]: INFO nova.scheduler.client.report [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Deleted allocations for instance a12b01db-28b4-477d-aef2-99304505d8c9 [ 1569.099063] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef5d1501-8b6e-4294-8cc6-da95ffa81960 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "a12b01db-28b4-477d-aef2-99304505d8c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.270s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.100188] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "a12b01db-28b4-477d-aef2-99304505d8c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.828s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.100409] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "a12b01db-28b4-477d-aef2-99304505d8c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.100614] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "a12b01db-28b4-477d-aef2-99304505d8c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.100784] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "a12b01db-28b4-477d-aef2-99304505d8c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.102700] env[61473]: INFO nova.compute.manager [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Terminating instance [ 1569.104353] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquiring lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.104510] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Acquired lock 
"refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.104677] env[61473]: DEBUG nova.network.neutron [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1569.111369] env[61473]: DEBUG nova.compute.manager [None req-5a7e8c7e-7599-407a-b5fc-01fe20dd72a7 tempest-InstanceActionsTestJSON-171285110 tempest-InstanceActionsTestJSON-171285110-project-member] [instance: ca3c10ac-b3cf-4291-b070-42332b304686] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1569.136421] env[61473]: DEBUG nova.network.neutron [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1569.139306] env[61473]: DEBUG nova.compute.manager [None req-5a7e8c7e-7599-407a-b5fc-01fe20dd72a7 tempest-InstanceActionsTestJSON-171285110 tempest-InstanceActionsTestJSON-171285110-project-member] [instance: ca3c10ac-b3cf-4291-b070-42332b304686] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1569.166617] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5a7e8c7e-7599-407a-b5fc-01fe20dd72a7 tempest-InstanceActionsTestJSON-171285110 tempest-InstanceActionsTestJSON-171285110-project-member] Lock "ca3c10ac-b3cf-4291-b070-42332b304686" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.440s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.175897] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1569.223816] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.224223] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.225942] env[61473]: INFO nova.compute.claims [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1569.247669] env[61473]: DEBUG nova.network.neutron [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.257890] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Releasing lock "refresh_cache-a12b01db-28b4-477d-aef2-99304505d8c9" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.258298] env[61473]: DEBUG nova.compute.manager [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1569.258498] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1569.259021] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-930baa82-a482-4330-8525-122bbf9a3d28 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.268408] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fccbc20-22da-4019-b83e-d0ad44994430 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.299443] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a12b01db-28b4-477d-aef2-99304505d8c9 could not be found. [ 1569.299651] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1569.299833] env[61473]: INFO nova.compute.manager [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1569.300089] env[61473]: DEBUG oslo.service.loopingcall [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.302579] env[61473]: DEBUG nova.compute.manager [-] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1569.302689] env[61473]: DEBUG nova.network.neutron [-] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1569.323250] env[61473]: DEBUG nova.network.neutron [-] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1569.330099] env[61473]: DEBUG nova.network.neutron [-] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.337970] env[61473]: INFO nova.compute.manager [-] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] Took 0.04 seconds to deallocate network for instance. 
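
The Acquiring/acquired/released triplets that bracket nearly every operation in this log come from oslo_concurrency's named locks; the "waited"/"held" timings are emitted by lockutils' inner wrapper. A minimal sketch of the same pattern, with lock names taken from the records above for illustration (the function body is a placeholder):

    from oslo_concurrency import lockutils

    # Decorator form: serializes all callers sharing the lock name,
    # e.g. the per-instance lock "a12b01db-28b4-477d-aef2-99304505d8c9".
    @lockutils.synchronized('a12b01db-28b4-477d-aef2-99304505d8c9')
    def do_terminate_instance():
        pass  # runs with the per-instance lock held

    # Context-manager form, as used around the resource tracker's
    # "compute_resources" critical sections.
    with lockutils.lock('compute_resources'):
        pass
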
[ 1569.429673] env[61473]: DEBUG oslo_concurrency.lockutils [None req-0c55d0dc-fa05-446f-8124-4c56e077f273 tempest-VolumesAdminNegativeTest-1652883978 tempest-VolumesAdminNegativeTest-1652883978-project-member] Lock "a12b01db-28b4-477d-aef2-99304505d8c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.329s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.430524] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "a12b01db-28b4-477d-aef2-99304505d8c9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 287.240s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.430716] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a12b01db-28b4-477d-aef2-99304505d8c9] During sync_power_state the instance has a pending task (deleting). Skip. [ 1569.430890] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "a12b01db-28b4-477d-aef2-99304505d8c9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.492075] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91244a94-09a1-43e6-9d43-98522b8a9ea5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.499827] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515a3b91-80c0-4962-bc64-e073f53f38e8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.528377] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2cf132-1ad0-420f-b022-50bde654bb3e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.534887] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840a0567-ffc8-481e-a76c-39ff66d85593 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.547413] env[61473]: DEBUG nova.compute.provider_tree [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.559434] env[61473]: DEBUG nova.scheduler.client.report [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 
'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1569.575743] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.352s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.576392] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1569.608634] env[61473]: DEBUG nova.compute.utils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1569.610113] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1569.610294] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1569.622539] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1569.699042] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1569.702601] env[61473]: DEBUG nova.policy [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87ab43b761054ee596e9fb017641d1f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a9cefc76d9141ccb05feecf306d5912', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1569.727300] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.727569] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.727736] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.727921] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1569.728085] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.728224] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.728433] env[61473]: DEBUG nova.virt.hardware [None 
req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.728607] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.728762] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.728923] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.729112] env[61473]: DEBUG nova.virt.hardware [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.729971] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0018524b-4590-4222-9394-4a9a1b6cca65 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.738237] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6fa7ce-7844-400f-b5c7-1059be40e917 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.152962] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Successfully created port: e84f4a3b-6db8-43ca-b09c-08016d1e1ab2 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1570.792165] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Successfully updated port: e84f4a3b-6db8-43ca-b09c-08016d1e1ab2 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.810028] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "refresh_cache-bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.810275] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 
tempest-AttachVolumeTestJSON-790514961-project-member] Acquired lock "refresh_cache-bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.810352] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1570.870853] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1571.054321] env[61473]: DEBUG nova.compute.manager [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Received event network-vif-plugged-e84f4a3b-6db8-43ca-b09c-08016d1e1ab2 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1571.054544] env[61473]: DEBUG oslo_concurrency.lockutils [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] Acquiring lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.054756] env[61473]: DEBUG oslo_concurrency.lockutils [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.054924] env[61473]: DEBUG oslo_concurrency.lockutils [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.055255] env[61473]: DEBUG nova.compute.manager [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] No waiting events found dispatching network-vif-plugged-e84f4a3b-6db8-43ca-b09c-08016d1e1ab2 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1571.055500] env[61473]: WARNING nova.compute.manager [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Received unexpected event network-vif-plugged-e84f4a3b-6db8-43ca-b09c-08016d1e1ab2 for instance with vm_state building and task_state spawning. 
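
The Acquiring / "acquired ... :: waited" / "released ... :: held" triplets in the lines above are emitted by oslo.concurrency's lockutils wrapper around each critical section (the `inner` frames at lockutils.py:402/407/421). A minimal sketch of the calling pattern that produces them, assuming only the public lockutils API; this is illustrative, not Nova's actual source:

```python
# Sketch only, assuming the public oslo.concurrency API (not Nova code).
# Entering the decorated function logs 'Lock "..." acquired by ... :: waited',
# and leaving it logs '"released" ... :: held', as in the records above.
from oslo_concurrency import lockutils

@lockutils.synchronized('bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff-events')
def pop_instance_event(waiters, event_name):
    # Critical section: remove the waiter registered for this external
    # event, if any; "No waiting events found" means this returned None.
    return waiters.pop(event_name, None)

# The same primitive is available as a context manager, e.g. around a
# network-info cache refresh:
with lockutils.lock('refresh_cache-bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff'):
    pass  # rebuild the instance's network info cache here
```
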
[ 1571.055706] env[61473]: DEBUG nova.compute.manager [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Received event network-changed-e84f4a3b-6db8-43ca-b09c-08016d1e1ab2 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1571.055899] env[61473]: DEBUG nova.compute.manager [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Refreshing instance network info cache due to event network-changed-e84f4a3b-6db8-43ca-b09c-08016d1e1ab2. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1571.056117] env[61473]: DEBUG oslo_concurrency.lockutils [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] Acquiring lock "refresh_cache-bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.268442] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Updating instance_info_cache with network_info: [{"id": "e84f4a3b-6db8-43ca-b09c-08016d1e1ab2", "address": "fa:16:3e:55:d1:22", "network": {"id": "146e1dff-8207-4330-8008-ab05a1f47473", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1497200854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a9cefc76d9141ccb05feecf306d5912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f4a3b-6d", "ovs_interfaceid": "e84f4a3b-6db8-43ca-b09c-08016d1e1ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.283351] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Releasing lock "refresh_cache-bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.283592] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Instance network_info: |[{"id": "e84f4a3b-6db8-43ca-b09c-08016d1e1ab2", "address": "fa:16:3e:55:d1:22", "network": {"id": "146e1dff-8207-4330-8008-ab05a1f47473", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1497200854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a9cefc76d9141ccb05feecf306d5912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f4a3b-6d", "ovs_interfaceid": "e84f4a3b-6db8-43ca-b09c-08016d1e1ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1571.283884] env[61473]: DEBUG oslo_concurrency.lockutils [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] Acquired lock "refresh_cache-bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.284071] env[61473]: DEBUG nova.network.neutron [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Refreshing network info cache for port e84f4a3b-6db8-43ca-b09c-08016d1e1ab2 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1571.285109] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:d1:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e84f4a3b-6db8-43ca-b09c-08016d1e1ab2', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1571.293179] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Creating folder: Project (9a9cefc76d9141ccb05feecf306d5912). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1571.295961] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fe1f80c-fcee-4715-9453-e131de1a8673 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.308991] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Created folder: Project (9a9cefc76d9141ccb05feecf306d5912) in parent group-v843485. [ 1571.309193] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Creating folder: Instances. Parent ref: group-v843570. 
{{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1571.309421] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-305a8739-1b88-4ea3-a810-e43862a2a676 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.318144] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Created folder: Instances in parent group-v843570. [ 1571.318380] env[61473]: DEBUG oslo.service.loopingcall [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.318802] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1571.318802] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c0b9beb-a4e7-4e00-a6fb-4307233899d1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.339329] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1571.339329] env[61473]: value = "task-4281672" [ 1571.339329] env[61473]: _type = "Task" [ 1571.339329] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.346233] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281672, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.556854] env[61473]: DEBUG nova.network.neutron [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Updated VIF entry in instance network info cache for port e84f4a3b-6db8-43ca-b09c-08016d1e1ab2. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1571.557304] env[61473]: DEBUG nova.network.neutron [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Updating instance_info_cache with network_info: [{"id": "e84f4a3b-6db8-43ca-b09c-08016d1e1ab2", "address": "fa:16:3e:55:d1:22", "network": {"id": "146e1dff-8207-4330-8008-ab05a1f47473", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1497200854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a9cefc76d9141ccb05feecf306d5912", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f4a3b-6d", "ovs_interfaceid": "e84f4a3b-6db8-43ca-b09c-08016d1e1ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.566708] env[61473]: DEBUG oslo_concurrency.lockutils [req-e64bde58-765d-486a-b964-6615e4b96582 req-8aa70225-cee6-43b9-a182-3708b84cd1f6 service nova] Releasing lock "refresh_cache-bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.849428] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281672, 'name': CreateVM_Task, 'duration_secs': 0.278019} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.849633] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1571.850373] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.850567] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.850920] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1571.851195] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69fa1349-df52-4b1b-a96d-aa60e95342be {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.855786] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Waiting for the task: (returnval){ [ 1571.855786] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52967d41-b23b-26d7-4fc3-8e2741b53f54" [ 1571.855786] env[61473]: _type = "Task" [ 1571.855786] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.864896] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52967d41-b23b-26d7-4fc3-8e2741b53f54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.366879] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.367161] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1572.367358] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.088471] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.992943] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.966625] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.967066] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.967066] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1596.966798] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.968061] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1596.968061] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1596.989263] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.989413] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.989548] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.989676] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.989798] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.989969] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.990144] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.990274] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.990392] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.990521] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1596.990643] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1596.991495] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.991704] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1597.002475] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.002690] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.002856] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.003013] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1597.004401] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa48277-2bdb-4c4d-8abf-cb5a5c7c333a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.012949] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb008d43-902e-47c3-9f16-6af7429f1f06 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.026453] env[61473]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba413555-97a3-44af-971f-9ca6bd7954c4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.032936] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246a1e4c-c957-4620-8cd2-57d0891b1781 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.062492] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180641MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1597.062637] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.062827] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.138086] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.138274] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.138401] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.138522] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.138638] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.138753] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.138865] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.139041] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.139312] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.139461] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1597.152597] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.162910] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.171988] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 84f08516-611c-4455-950b-b332d854e939 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.180834] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 605fba76-9c25-4a0d-8e4e-0c7672c3a841 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.189399] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.198498] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7c9630f0-b868-4029-a841-4569d984fc5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.207362] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 16e08841-5bb0-4d57-800c-ef036946acf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.216192] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d5b8290a-2dd2-4a49-ba0d-1b88a5940833 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1597.216419] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1597.216565] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1597.411589] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8913642d-5625-4875-8b97-9304ac09875c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.420018] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed2772a-9bde-495c-ac81-8ce9bff0c37e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.449498] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08980d1-89d6-49b9-8505-c3b41f364ff2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.457026] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54ce85a-7ff8-4341-b5ba-f840814e6b75 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.469885] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1597.479613] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1597.496018] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1597.496227] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.433s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.470647] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1598.470928] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1599.962555] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1606.961884] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1615.431907] env[61473]: WARNING oslo_vmware.rw_handles [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles response.begin()
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1615.431907] env[61473]: ERROR oslo_vmware.rw_handles
[ 1615.432473] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1615.434329] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1615.434583] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Copying Virtual Disk [datastore2] vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/69e65a19-a98a-42e6-99e5-e370a3911314/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
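
The RemoteDisconnected traceback above is raised when rw_handles closes its image-transfer connection and the far end has already dropped the socket, so http.client finds no status line to read. A minimal stdlib reproduction of that failure mode (the host and path are placeholders, not values from this log):

```python
# Stdlib sketch of the failure mode above: getresponse() raises
# http.client.RemoteDisconnected when the server closes the socket before
# sending a status line. Host/path below are placeholders.
import http.client

def fetch_status(host, path="/"):
    conn = http.client.HTTPSConnection(host, timeout=30)
    try:
        conn.request("GET", path)
        return conn.getresponse().status
    except http.client.RemoteDisconnected as exc:
        # Same text as the log: "Remote end closed connection without response"
        print(f"remote end hung up early: {exc}")
        return None
    finally:
        conn.close()
```

Note that in the run above the warning is non-fatal: the image data had already been written, and the download is reported as complete in the next record.
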
[ 1615.434873] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8081a908-7582-4385-abab-9c36e72b0c7c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1615.442444] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){
[ 1615.442444] env[61473]: value = "task-4281673"
[ 1615.442444] env[61473]: _type = "Task"
[ 1615.442444] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1615.450060] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': task-4281673, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1615.952971] env[61473]: DEBUG oslo_vmware.exceptions [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
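
The "Waiting for the task ... progress is 0%." records reflect oslo.vmware's poll loop: read the task's info on each tick, report progress, return on success, and translate the fault on error. A schematic version of that loop; `get_task_info` is a hypothetical callable standing in for the PropertyCollector read oslo.vmware performs, not a real oslo.vmware function:

```python
# Schematic poll-until-done loop matching the "Waiting for the task ...
# progress is N%." records. `get_task_info` is a hypothetical stand-in
# returning a dict like {'state': ..., 'progress': ..., 'error': ...}.
import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # oslo.vmware maps the fault name to an exception class here
            # (the "Fault InvalidArgument not matched." probe above is that
            # lookup falling through to the generic fault exception).
            raise RuntimeError(info["error"])
        print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
```
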
[ 1615.953278] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1615.953844] env[61473]: ERROR nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1615.953844] env[61473]: Faults: ['InvalidArgument']
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last):
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] yield resources
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.driver.spawn(context, instance, image_meta,
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._fetch_image_if_missing(context, vi)
[ 1615.953844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] image_cache(vi, tmp_image_ds_loc)
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] vm_util.copy_virtual_disk(
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] session._wait_for_task(vmdk_copy_task)
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.wait_for_task(task_ref)
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return evt.wait()
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] result = hub.switch()
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1615.954283] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.greenlet.switch()
[ 1615.954739] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1615.954739] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.f(*self.args, **self.kw)
[ 1615.954739] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1615.954739] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise exceptions.translate_fault(task_info.error)
[ 1615.954739] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1615.954739] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Faults: ['InvalidArgument']
[ 1615.954739] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4]
[ 1615.954739] env[61473]: INFO nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Terminating instance
[ 1615.955715] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1615.955928] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1615.956184] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ea722dd-d179-42a4-ac20-5502be8e1ee3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
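
The spawn failure surfaces as oslo_vmware.exceptions.VimFaultException, whose fault_list attribute carries the raw vSphere fault names (['InvalidArgument'] here, for the rejected fileType in the CopyVirtualDisk_Task spec). A hedged sketch of catching it around a task wait; `session` (an oslo.vmware API session) and `copy_task` are placeholders, not objects from this log:

```python
# Hedged sketch: distinguishing the InvalidArgument fault seen above.
# VimFaultException exposes the vSphere fault names in .fault_list.
import logging

from oslo_vmware import exceptions as vexc

LOG = logging.getLogger(__name__)

def wait_for_copy(session, copy_task):
    """Wait on a vCenter task; surface vSphere fault names on failure."""
    try:
        return session.wait_for_task(copy_task)
    except vexc.VimFaultException as exc:
        # e.g. exc.fault_list == ['InvalidArgument'] for a bad fileType in
        # the disk-copy spec; the build is then aborted, as logged above.
        LOG.error("Disk copy failed with faults %s: %s", exc.fault_list, exc)
        raise
```
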
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.958358] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1615.958591] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1615.959328] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a8f3be-9db2-4dd4-8816-e88d65889c71 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.966367] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1615.967578] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04255fcd-bd42-42b7-acab-d7e22df6a4bf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.968967] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1615.969156] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1615.969805] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdd5868f-ed71-46c3-93e9-a0a06242b58c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.974500] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Waiting for the task: (returnval){ [ 1615.974500] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5220a4b3-8cbc-63e5-454e-0864428c26db" [ 1615.974500] env[61473]: _type = "Task" [ 1615.974500] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.988593] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1615.988821] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Creating directory with path [datastore2] vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1615.989039] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5021badd-3efb-4ba9-ac1c-c95daca697a4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.999260] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Created directory with path [datastore2] vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1615.999446] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Fetch image to [datastore2] vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1615.999626] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1616.000324] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a84e7a-70cf-4e87-a8f0-2f8db4c3cfcd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.006578] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e591dd-df1c-4624-89c6-8ac98d084ce7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.015211] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe328439-b78b-4c70-bbd6-2405a874789b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.047136] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bc7c4954-2680-4b50-8616-81bf45212620 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.049667] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1616.049859] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1616.050043] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Deleting the datastore file [datastore2] 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1616.050263] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d66590b1-1a66-47ab-8503-2bba5e4f4e42 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.054842] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-66e53ee3-7fa0-4662-a5c3-ca355de5cdf5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.057596] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for the task: (returnval){ [ 1616.057596] env[61473]: value = "task-4281675" [ 1616.057596] env[61473]: _type = "Task" [ 1616.057596] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.065547] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': task-4281675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.079870] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1616.132323] env[61473]: DEBUG oslo_vmware.rw_handles [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1616.190181] env[61473]: DEBUG oslo_vmware.rw_handles [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1616.190396] env[61473]: DEBUG oslo_vmware.rw_handles [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1616.568886] env[61473]: DEBUG oslo_vmware.api [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Task: {'id': task-4281675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066929} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.569181] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.569377] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1616.569592] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1616.569768] env[61473]: INFO nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Took 0.61 seconds to destroy the instance on the hypervisor. 
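Annotation: the spawn failure above is the CopyVirtualDisk_Task coming back from vCenter with an InvalidArgument fault on the fileType parameter; oslo.vmware's task poller re-raises it client-side, which is the VimFaultException in the traceback. The concurrent ServersV294 request then re-creates the image-cache directory and streams the VMDK again over the datastore HTTP endpoint, which is why the rw_handles write connection appears right after the failure. A minimal sketch of the poll-and-translate pattern, using simplified stand-in names (get_task_info, the dict-shaped task info) rather than the oslo.vmware source:

# Hedged sketch: how a server-side task error surfaces client-side carrying
# both the localized message and the fault-name list, matching
# "A specified parameter was not correct: fileType" / "Faults: ['InvalidArgument']".
import time


class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(get_task_info, interval=0.5):
    """Poll task info until the task succeeds or errors out."""
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # Analogous to raise exceptions.translate_fault(task_info.error)
            # in the _poll_task frames above.
            raise VimFaultException(info["faults"], info["message"])
        time.sleep(interval)  # oslo.vmware drives this via a looping call


try:
    wait_for_task(lambda: {"state": "error",
                           "faults": ["InvalidArgument"],
                           "message": "A specified parameter was not correct: fileType"})
except VimFaultException as exc:
    print(exc, exc.fault_list)  # -> message text and ['InvalidArgument']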
[ 1616.571818] env[61473]: DEBUG nova.compute.claims [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1616.571987] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.572209] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.818262] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26de8dd-9467-47bd-a28c-947287628fce {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.825669] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e122a5-d4d7-49b4-aac2-f4ccba2fa8da {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.857029] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06b8d1d-a86e-487a-95a9-07745689cc59 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.864462] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9207fc66-a4aa-4cc3-b63e-f935b9fd875f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.879991] env[61473]: DEBUG nova.compute.provider_tree [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1616.888669] env[61473]: DEBUG nova.scheduler.client.report [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1616.903424] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 
tempest-MigrationsAdminTest-532083582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.331s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.903519] env[61473]: ERROR nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1616.903519] env[61473]: Faults: ['InvalidArgument'] [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last): [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.driver.spawn(context, instance, image_meta, [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._fetch_image_if_missing(context, vi) [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] image_cache(vi, tmp_image_ds_loc) [ 1616.903519] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] vm_util.copy_virtual_disk( [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] session._wait_for_task(vmdk_copy_task) [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.wait_for_task(task_ref) [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return evt.wait() [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 
02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] result = hub.switch() [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.greenlet.switch() [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1616.903824] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.f(*self.args, **self.kw) [ 1616.904143] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1616.904143] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise exceptions.translate_fault(task_info.error) [ 1616.904143] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1616.904143] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Faults: ['InvalidArgument'] [ 1616.904143] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1616.904280] env[61473]: DEBUG nova.compute.utils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1616.906028] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Build of instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 was re-scheduled: A specified parameter was not correct: fileType [ 1616.906028] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1616.906258] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1616.906487] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1616.906658] env[61473]: DEBUG nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1616.906819] env[61473]: DEBUG nova.network.neutron [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1617.006649] env[61473]: DEBUG neutronclient.v2_0.client [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61473) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1617.007696] env[61473]: ERROR nova.compute.manager [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last): [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.driver.spawn(context, instance, image_meta, [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._fetch_image_if_missing(context, vi) [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] image_cache(vi, tmp_image_ds_loc) [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1617.007696] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] vm_util.copy_virtual_disk( [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 
1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] session._wait_for_task(vmdk_copy_task) [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.wait_for_task(task_ref) [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return evt.wait() [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] result = hub.switch() [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.greenlet.switch() [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.f(*self.args, **self.kw) [ 1617.008026] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise exceptions.translate_fault(task_info.error) [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Faults: ['InvalidArgument'] [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] During handling of the above exception, another exception occurred: [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last): [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 2452, in _do_build_and_run_instance [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._build_and_run_instance(context, instance, image, [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 2744, in _build_and_run_instance [ 1617.008355] 
env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise exception.RescheduledException( [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] nova.exception.RescheduledException: Build of instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 was re-scheduled: A specified parameter was not correct: fileType [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Faults: ['InvalidArgument'] [ 1617.008355] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] During handling of the above exception, another exception occurred: [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last): [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] exception_handler_v20(status_code, error_body) [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise client_exc(message=error_message, [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Neutron server returns request_ids: ['req-b5d10a2f-c77c-4585-b765-9284fd83d3cc'] [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.008723] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] During handling of the above exception, another exception occurred: [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last): [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 3041, in _cleanup_allocated_networks [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._deallocate_network(context, instance, requested_networks) [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File 
"/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.network_api.deallocate_for_instance( [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] data = neutron.list_ports(**search_opts) [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.list('ports', self.ports_path, retrieve_all, [ 1617.009060] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] for r in self._pagination(collection, path, **params): [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] res = self.get(path, params=params) [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.retry_request("GET", action, body=body, [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1617.009404] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return 
self.do_request(method, action, body=body, [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._handle_fault_response(status_code, replybody, resp) [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise exception.Unauthorized() [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] nova.exception.Unauthorized: Not authorized. [ 1617.009765] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.070459] env[61473]: INFO nova.scheduler.client.report [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Deleted allocations for instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 [ 1617.093162] env[61473]: DEBUG oslo_concurrency.lockutils [None req-48170b3b-377e-4d79-bb2c-297d2dd50beb tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 588.253s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.094466] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 392.631s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.094687] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Acquiring lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.094894] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.095075] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 
tempest-MigrationsAdminTest-532083582-project-member] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.097659] env[61473]: INFO nova.compute.manager [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Terminating instance [ 1617.099415] env[61473]: DEBUG nova.compute.manager [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1617.099626] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1617.100098] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95b987cb-ce4d-4661-8c45-3ae3294846fe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.108780] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da5d7f8-cf01-41a9-994e-81da971919b3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.137537] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02a53e4f-55aa-4d13-8f74-13ddfe37fae4 could not be found. [ 1617.137758] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1617.137932] env[61473]: INFO nova.compute.manager [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1617.138216] env[61473]: DEBUG oslo.service.loopingcall [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.138541] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1617.141058] env[61473]: DEBUG nova.compute.manager [-] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1617.141163] env[61473]: DEBUG nova.network.neutron [-] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1617.190606] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.190910] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.192421] env[61473]: INFO nova.compute.claims [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1617.239906] env[61473]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61473) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1617.239906] env[61473]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
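Annotation: the deallocation failure here is the admin-token case. neutronclient raises a 401 Unauthorized, and the wrapper in nova/network/neutron.py (the repeated "wrapper" frames in the tracebacks above and below) converts it into NeutronAdminCredentialConfigurationInvalid because the failing client is the admin-scoped one, which points at the [neutron] credentials in nova.conf rather than at the requesting user. A hedged sketch of that translation pattern; the class and function names below are simplified stand-ins, not the Nova source:

# Hedged sketch of the exception-translation wrapper around neutronclient calls.
class ClientUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""


class Unauthorized(Exception):
    """Stand-in for nova.exception.Unauthorized ('Not authorized.')."""


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """The admin-scoped Neutron client itself cannot authenticate."""


def translate_neutron_errors(is_admin_client):
    def decorator(func):
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except ClientUnauthorized:
                if is_admin_client:
                    # Admin token generation failed: a deployment problem, so
                    # raise the config-specific exception instead of a plain 401.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise Unauthorized()
        return wrapper
    return decorator


@translate_neutron_errors(is_admin_client=True)
def list_ports(**search_opts):
    # Simulates the neutron.list_ports(**search_opts) call in the traceback.
    raise ClientUnauthorized("The request you have made requires authentication.")


try:
    list_ports(device_id="02a53e4f-55aa-4d13-8f74-13ddfe37fae4")
except NeutronAdminCredentialConfigurationInvalid as exc:
    print(type(exc).__name__)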
[ 1617.240358] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-59ea7c58-8d71-460e-a086-a0d227c2b770'] [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1617.240358] env[61473]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1617.240923] env[61473]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1617.240923] env[61473]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1617.241347] env[61473]: ERROR oslo.service.loopingcall [ 1617.241724] env[61473]: ERROR nova.compute.manager [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1617.271058] env[61473]: ERROR nova.compute.manager [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
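Annotation: before the instance is pushed to ERROR, _try_deallocate_network runs the deallocation through a RetryDecorator-driven dynamic looping call (_deallocate_network_with_retries in the frames above), so transient Neutron errors can be retried with a growing interval. A credential-configuration fault is evidently not treated as retryable, which is consistent with the looping call failing on its first pass here. A simplified retry sketch under those assumptions, not the oslo.service implementation:

# Hedged sketch: retry-with-growing-interval around network deallocation.
# Only exceptions listed as transient are retried; anything else propagates
# immediately, as NeutronAdminCredentialConfigurationInvalid does above.
import time


class ConfigInvalid(Exception):
    """Stand-in for the non-retryable credential-configuration fault."""


def retry_call(func, transient=(), max_attempts=3, base_delay=0.01):
    for attempt in range(1, max_attempts + 1):
        try:
            return func()
        except transient:
            if attempt == max_attempts:
                raise
            time.sleep(base_delay * 2 ** (attempt - 1))  # growing interval


def deallocate_network():
    raise ConfigInvalid("Networking client is experiencing an unauthorized exception.")


try:
    # ConfigInvalid is not in the transient tuple, so the call fails on
    # attempt 1 and the caller sets vm_state to ERROR, as in the records above.
    retry_call(deallocate_network, transient=(TimeoutError,))
except ConfigInvalid as exc:
    print("deallocation failed:", exc)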
[ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last): [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] exception_handler_v20(status_code, error_body) [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise client_exc(message=error_message, [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Neutron server returns request_ids: ['req-59ea7c58-8d71-460e-a086-a0d227c2b770'] [ 1617.271058] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] During handling of the above exception, another exception occurred: [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Traceback (most recent call last): [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._delete_instance(context, instance, bdms) [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._shutdown_instance(context, instance, bdms) [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._try_deallocate_network(context, instance, requested_networks) [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] with excutils.save_and_reraise_exception(): [ 1617.271477] env[61473]: ERROR 
nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1617.271477] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.force_reraise() [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise self.value [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] _deallocate_network_with_retries() [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return evt.wait() [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] result = hub.switch() [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.greenlet.switch() [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1617.271844] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] result = func(*self.args, **self.kw) [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] result = f(*args, **kwargs) [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._deallocate_network( [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self.network_api.deallocate_for_instance( [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 
02a53e4f-55aa-4d13-8f74-13ddfe37fae4] data = neutron.list_ports(**search_opts) [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.list('ports', self.ports_path, retrieve_all, [ 1617.272206] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] for r in self._pagination(collection, path, **params): [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] res = self.get(path, params=params) [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.retry_request("GET", action, body=body, [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1617.272565] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] return self.do_request(method, action, body=body, [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] ret = obj(*args, **kwargs) [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] self._handle_fault_response(status_code, replybody, resp) [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1617.272927] env[61473]: ERROR nova.compute.manager [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] [ 1617.300019] env[61473]: DEBUG oslo_concurrency.lockutils [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.205s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.301142] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 335.111s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.301339] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1617.301513] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "02a53e4f-55aa-4d13-8f74-13ddfe37fae4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.347590] env[61473]: INFO nova.compute.manager [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] [instance: 02a53e4f-55aa-4d13-8f74-13ddfe37fae4] Successfully reverted task state from None on failure for instance. [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server [None req-d98e1786-da12-47b1-a57d-b8b369070c4f tempest-MigrationsAdminTest-532083582 tempest-MigrationsAdminTest-532083582-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-59ea7c58-8d71-460e-a086-a0d227c2b770'] [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1617.353666] env[61473]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1617.354198] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3349, in terminate_instance [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in do_terminate_instance [ 1617.354705] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1617.355155] env[61473]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1617.355623] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.355623] env[61473]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1617.356129] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1617.356637] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1617.356637] env[61473]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1617.356637] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1617.356637] env[61473]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1617.356637] env[61473]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1617.356637] env[61473]: ERROR oslo_messaging.rpc.server [ 1617.425141] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2e90df-6049-4781-a984-d3ce5db5da4d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.432554] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5e4622-aef7-417e-9b3d-5ddd114e54b2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.461548] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6ead18-9caa-44ac-82b1-b5770dd2c4b7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.467818] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a177b384-3ef4-4eda-b33c-4336b70f46ae {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.480161] env[61473]: DEBUG nova.compute.provider_tree [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.489521] env[61473]: DEBUG nova.scheduler.client.report [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1617.504496] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.314s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.504975] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1617.538877] env[61473]: DEBUG nova.compute.utils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1617.540501] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1617.540793] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1617.549103] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1617.617694] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1617.626013] env[61473]: DEBUG nova.policy [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eda5c2e486864d80b4b3f1415a181dcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75fb9e80f03749519e953a48c30915c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1617.645324] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1617.645570] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1617.645736] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1617.645901] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1617.646055] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1617.646205] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1617.646418] env[61473]: 
DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1617.646740] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1617.646740] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1617.646898] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1617.647086] env[61473]: DEBUG nova.virt.hardware [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1617.647928] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3aba279-c530-40e3-be59-cefccf523a77 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.661825] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abd200c-26fc-4fd8-ad2d-87cf83c732fd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.243263] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Successfully created port: 5a796493-a69f-4194-addb-bd9e85720ca5 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1618.871781] env[61473]: DEBUG nova.compute.manager [req-20a64c00-03a7-49b0-9196-b7d7b1ec2b27 req-bc44d52f-4f1c-46b7-b628-c6d7aea96845 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Received event network-vif-plugged-5a796493-a69f-4194-addb-bd9e85720ca5 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1618.872042] env[61473]: DEBUG oslo_concurrency.lockutils [req-20a64c00-03a7-49b0-9196-b7d7b1ec2b27 req-bc44d52f-4f1c-46b7-b628-c6d7aea96845 service nova] Acquiring lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.872221] env[61473]: DEBUG oslo_concurrency.lockutils [req-20a64c00-03a7-49b0-9196-b7d7b1ec2b27 
req-bc44d52f-4f1c-46b7-b628-c6d7aea96845 service nova] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.872389] env[61473]: DEBUG oslo_concurrency.lockutils [req-20a64c00-03a7-49b0-9196-b7d7b1ec2b27 req-bc44d52f-4f1c-46b7-b628-c6d7aea96845 service nova] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.872600] env[61473]: DEBUG nova.compute.manager [req-20a64c00-03a7-49b0-9196-b7d7b1ec2b27 req-bc44d52f-4f1c-46b7-b628-c6d7aea96845 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] No waiting events found dispatching network-vif-plugged-5a796493-a69f-4194-addb-bd9e85720ca5 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1618.872740] env[61473]: WARNING nova.compute.manager [req-20a64c00-03a7-49b0-9196-b7d7b1ec2b27 req-bc44d52f-4f1c-46b7-b628-c6d7aea96845 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Received unexpected event network-vif-plugged-5a796493-a69f-4194-addb-bd9e85720ca5 for instance with vm_state building and task_state spawning. [ 1618.927274] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Successfully updated port: 5a796493-a69f-4194-addb-bd9e85720ca5 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1618.937880] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "refresh_cache-c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.938044] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "refresh_cache-c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.938201] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1618.999321] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1619.173047] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Updating instance_info_cache with network_info: [{"id": "5a796493-a69f-4194-addb-bd9e85720ca5", "address": "fa:16:3e:7e:9c:c8", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a796493-a6", "ovs_interfaceid": "5a796493-a69f-4194-addb-bd9e85720ca5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.186616] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Releasing lock "refresh_cache-c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.186912] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Instance network_info: |[{"id": "5a796493-a69f-4194-addb-bd9e85720ca5", "address": "fa:16:3e:7e:9c:c8", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a796493-a6", "ovs_interfaceid": "5a796493-a69f-4194-addb-bd9e85720ca5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 1619.187891] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:9c:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a796493-a69f-4194-addb-bd9e85720ca5', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1619.194751] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating folder: Project (75fb9e80f03749519e953a48c30915c2). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1619.195310] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3862e00e-6400-42d7-9b4b-42da1b7476f9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.208669] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Created folder: Project (75fb9e80f03749519e953a48c30915c2) in parent group-v843485. [ 1619.208872] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating folder: Instances. Parent ref: group-v843573. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1619.209126] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00be6897-3ce3-4226-9664-5bf2755f2c6f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.218796] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Created folder: Instances in parent group-v843573. [ 1619.219029] env[61473]: DEBUG oslo.service.loopingcall [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.219220] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1619.219416] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22fd909a-9edc-486b-a3ac-45b39c492d1c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.239147] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1619.239147] env[61473]: value = "task-4281678" [ 1619.239147] env[61473]: _type = "Task" [ 1619.239147] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.246614] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281678, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.748491] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281678, 'name': CreateVM_Task, 'duration_secs': 0.332482} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.749118] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1619.749493] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.749741] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.750070] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1619.750306] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e2084b0-3d05-49b8-8d08-fd37f6b0aff7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.754549] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for the task: (returnval){ [ 1619.754549] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]521fa605-27b0-ff3d-9c7f-bad352d76987" [ 1619.754549] env[61473]: _type = "Task" [ 1619.754549] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.761889] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]521fa605-27b0-ff3d-9c7f-bad352d76987, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.264821] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.265189] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1620.265318] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.950836] env[61473]: DEBUG nova.compute.manager [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Received event network-changed-5a796493-a69f-4194-addb-bd9e85720ca5 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1620.951108] env[61473]: DEBUG nova.compute.manager [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Refreshing instance network info cache due to event network-changed-5a796493-a69f-4194-addb-bd9e85720ca5. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1620.951298] env[61473]: DEBUG oslo_concurrency.lockutils [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] Acquiring lock "refresh_cache-c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.951441] env[61473]: DEBUG oslo_concurrency.lockutils [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] Acquired lock "refresh_cache-c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.951600] env[61473]: DEBUG nova.network.neutron [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Refreshing network info cache for port 5a796493-a69f-4194-addb-bd9e85720ca5 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1621.269387] env[61473]: DEBUG nova.network.neutron [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Updated VIF entry in instance network info cache for port 5a796493-a69f-4194-addb-bd9e85720ca5. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1621.269921] env[61473]: DEBUG nova.network.neutron [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Updating instance_info_cache with network_info: [{"id": "5a796493-a69f-4194-addb-bd9e85720ca5", "address": "fa:16:3e:7e:9c:c8", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a796493-a6", "ovs_interfaceid": "5a796493-a69f-4194-addb-bd9e85720ca5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.280393] env[61473]: DEBUG oslo_concurrency.lockutils [req-36acb46f-f145-40d3-8140-ac7784df9a97 req-0ac07b92-8005-408a-b560-4ea38a05ce20 service nova] Releasing lock "refresh_cache-c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.694760] env[61473]: DEBUG oslo_concurrency.lockutils [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] 
Acquiring lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.968740] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.965914] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.966742] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.979302] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.979506] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.979674] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.979830] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1656.980968] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc65308-fc16-4c43-a865-02a2fb6922b3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.989930] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffe9304-de9b-4f42-a10f-eff04669fcd7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.003977] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03517197-51ce-4551-ac4f-9dae2418f5fc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.010286] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4ea490-70ae-435c-99a5-b6614f6cd814 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.040471] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180641MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1657.040623] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.040818] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.116720] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.116885] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117022] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117154] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117296] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117437] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117555] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117669] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117783] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.117905] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1657.129374] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.140627] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 84f08516-611c-4455-950b-b332d854e939 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.151220] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 605fba76-9c25-4a0d-8e4e-0c7672c3a841 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.162947] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.174854] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7c9630f0-b868-4029-a841-4569d984fc5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.186129] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 16e08841-5bb0-4d57-800c-ef036946acf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.197670] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d5b8290a-2dd2-4a49-ba0d-1b88a5940833 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1657.197798] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1657.198063] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1657.380244] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7186d2b-16f6-4390-a103-2296d118373f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.387837] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660b99fd-da6e-4798-a4c0-942f84381ea1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.417210] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c037fd5f-745d-4452-8b14-373839c5bfb9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.423640] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380b9b3f-3a8e-4528-b52b-c5338af6023b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.437395] env[61473]: DEBUG nova.compute.provider_tree [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1657.446385] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1657.460795] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1657.461128] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.420s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.460925] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.461225] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.461378] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.461525] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1658.967603] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.967857] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1658.968135] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1658.988942] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.989121] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.989310] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.989476] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.989633] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.989766] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.989888] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.990058] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.990137] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.990275] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1658.990400] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1659.966657] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1660.961979] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.829325] env[61473]: WARNING oslo_vmware.rw_handles [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1665.829325] env[61473]: ERROR oslo_vmware.rw_handles [ 1665.829981] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store 
datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1665.831963] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1665.832223] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Copying Virtual Disk [datastore2] vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/b3acc15c-f9ec-418a-81c9-c6105f99e61c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1665.832503] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e99f1ab4-6aa9-4676-b152-7912998cb482 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.840188] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Waiting for the task: (returnval){ [ 1665.840188] env[61473]: value = "task-4281679" [ 1665.840188] env[61473]: _type = "Task" [ 1665.840188] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.848052] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Task: {'id': task-4281679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.350328] env[61473]: DEBUG oslo_vmware.exceptions [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1666.350631] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.351195] env[61473]: ERROR nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1666.351195] env[61473]: Faults: ['InvalidArgument'] [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Traceback (most recent call last): [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] yield resources [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self.driver.spawn(context, instance, image_meta, [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self._fetch_image_if_missing(context, vi) [ 1666.351195] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] image_cache(vi, tmp_image_ds_loc) [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] vm_util.copy_virtual_disk( [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] session._wait_for_task(vmdk_copy_task) [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] return self.wait_for_task(task_ref) [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] return evt.wait() [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] result = hub.switch() [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1666.351535] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] return self.greenlet.switch() [ 1666.351867] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1666.351867] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self.f(*self.args, **self.kw) [ 1666.351867] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1666.351867] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] raise exceptions.translate_fault(task_info.error) [ 1666.351867] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1666.351867] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Faults: ['InvalidArgument'] [ 1666.351867] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] [ 1666.351867] env[61473]: INFO nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Terminating instance [ 1666.353063] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.353274] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1666.353515] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-837f6eaf-f865-4f71-bce6-bef417011257 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.355617] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1666.355810] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1666.356514] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d905f2-4677-4fc5-9012-9d0508e2a4b4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.363167] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1666.363381] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df20c678-b1e7-4c53-9661-4452f9bdcb42 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.365431] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1666.365606] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1666.366519] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2687ab53-79fc-4151-b102-413a3c8e63fc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.371036] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Waiting for the task: (returnval){ [ 1666.371036] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52ff3146-f959-0363-ced7-3a20b97a03e8" [ 1666.371036] env[61473]: _type = "Task" [ 1666.371036] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.377983] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52ff3146-f959-0363-ced7-3a20b97a03e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.439933] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1666.440171] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1666.440356] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Deleting the datastore file [datastore2] d79207a6-43e0-474a-9c61-8a71a86da7a0 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1666.440610] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0407e7b0-360f-4829-94e8-5729f946a43b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.446955] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Waiting for the task: (returnval){ [ 1666.446955] env[61473]: value = "task-4281681" [ 1666.446955] env[61473]: _type = "Task" [ 1666.446955] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.454330] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Task: {'id': task-4281681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.881234] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1666.881559] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Creating directory with path [datastore2] vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1666.881752] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f77ae4a-d903-4fe1-aae9-56d4e4327447 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.892970] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Created directory with path [datastore2] vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1666.893170] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Fetch image to [datastore2] vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1666.893339] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1666.894090] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5df9567-c9ff-4306-8bef-7ef9e55458d7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.900360] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd362c2-6392-4c4f-8032-2da745cc8ab4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.909491] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a13bc9-4612-403a-bc13-71ca3c864d20 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.940057] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d57a9dde-7f8c-4204-8fb2-efa4e9f4b552 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.945312] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1d764e04-e70c-4eeb-959b-f188f9e30c3a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.955058] env[61473]: DEBUG oslo_vmware.api [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Task: {'id': task-4281681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064804} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.955298] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1666.955479] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1666.955648] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1666.955819] env[61473]: INFO nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Took 0.60 seconds to destroy the instance on the hypervisor. 
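The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same wait_for_task pattern: the driver submits a vCenter task, then a polling loop logs "progress is N%" until the task either completes successfully or raises a translated fault. A minimal sketch of that polling loop follows; it is an illustration only, with get_task_info() and TaskFailed as hypothetical stand-ins rather than oslo.vmware APIs.

import time

POLL_INTERVAL = 0.5  # seconds between polls; illustrative value

class TaskFailed(Exception):
    """Raised when the remote task reports an error state."""

def wait_for_task(task_ref, get_task_info):
    """Poll a vCenter-style task until it leaves the running state.

    get_task_info(task_ref) is a hypothetical callable assumed to
    return an object with .state ('running'|'success'|'error'),
    .progress (int) and .error.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            # corresponds to "raise exceptions.translate_fault(task_info.error)"
            # in the tracebacks logged above
            raise TaskFailed(info.error)
        print(f"Task {task_ref} progress is {info.progress}%.")
        time.sleep(POLL_INTERVAL)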
[ 1666.957914] env[61473]: DEBUG nova.compute.claims [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1666.958098] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.958316] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.970662] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1667.027340] env[61473]: DEBUG oslo_vmware.rw_handles [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1667.086870] env[61473]: DEBUG oslo_vmware.rw_handles [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1667.086870] env[61473]: DEBUG oslo_vmware.rw_handles [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1667.265156] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37db5755-d085-489d-83a6-c886b4cfd5d8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.273141] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c960782a-e165-4638-bbf2-83c9bb4fc928 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.304154] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570ed575-5b8d-459c-aec8-b652b3f1cebc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.311707] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ca5298-557e-4605-9f97-4efed96770e3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.326071] env[61473]: DEBUG nova.compute.provider_tree [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.334677] env[61473]: DEBUG nova.scheduler.client.report [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1667.348586] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.390s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.348991] env[61473]: ERROR nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1667.348991] env[61473]: Faults: ['InvalidArgument'] [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Traceback (most recent call last): [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in 
_build_and_run_instance [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self.driver.spawn(context, instance, image_meta, [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self._fetch_image_if_missing(context, vi) [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] image_cache(vi, tmp_image_ds_loc) [ 1667.348991] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] vm_util.copy_virtual_disk( [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] session._wait_for_task(vmdk_copy_task) [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] return self.wait_for_task(task_ref) [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] return evt.wait() [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] result = hub.switch() [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] return self.greenlet.switch() [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1667.349419] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] self.f(*self.args, **self.kw) [ 1667.349743] env[61473]: ERROR nova.compute.manager [instance: 
d79207a6-43e0-474a-9c61-8a71a86da7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1667.349743] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] raise exceptions.translate_fault(task_info.error) [ 1667.349743] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1667.349743] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Faults: ['InvalidArgument'] [ 1667.349743] env[61473]: ERROR nova.compute.manager [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] [ 1667.349743] env[61473]: DEBUG nova.compute.utils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1667.351369] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Build of instance d79207a6-43e0-474a-9c61-8a71a86da7a0 was re-scheduled: A specified parameter was not correct: fileType [ 1667.351369] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1667.351752] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1667.351928] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1667.352117] env[61473]: DEBUG nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1667.352299] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1667.660297] env[61473]: DEBUG nova.network.neutron [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.670179] env[61473]: INFO nova.compute.manager [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Took 0.32 seconds to deallocate network for instance. [ 1667.756196] env[61473]: INFO nova.scheduler.client.report [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Deleted allocations for instance d79207a6-43e0-474a-9c61-8a71a86da7a0 [ 1667.776319] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f6b77a8e-db5b-4928-b5b6-3066e8dc3723 tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 584.808s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.777597] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 389.415s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.777819] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Acquiring lock "d79207a6-43e0-474a-9c61-8a71a86da7a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.778041] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0-events"
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.778213] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.780807] env[61473]: INFO nova.compute.manager [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Terminating instance [ 1667.782956] env[61473]: DEBUG nova.compute.manager [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1667.783166] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1667.783426] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0987beb6-86f3-467c-9c8d-be6b27f08277 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.790977] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1667.797877] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525072f8-9296-426b-b669-3d9bd772b9ae {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.827518] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d79207a6-43e0-474a-9c61-8a71a86da7a0 could not be found. 
[ 1667.827749] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1667.827927] env[61473]: INFO nova.compute.manager [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1667.828196] env[61473]: DEBUG oslo.service.loopingcall [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1667.828426] env[61473]: DEBUG nova.compute.manager [-] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1667.828521] env[61473]: DEBUG nova.network.neutron [-] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1667.853021] env[61473]: DEBUG nova.network.neutron [-] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.855946] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.856199] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.857615] env[61473]: INFO nova.compute.claims [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1667.861140] env[61473]: INFO nova.compute.manager [-] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] Took 0.03 seconds to deallocate network for instance. 
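While the failed instance is torn down, the resource tracker claims capacity for cbbd16ce-8cea-4d08-b672-99da04f148e4 under the global "compute_resources" lock, and the inventory it reconciles with placement a few records below is reported unchanged. Placement derives schedulable capacity per resource class as roughly (total - reserved) * allocation_ratio, so the figures in this log work out as follows (a quick arithmetic check, not Nova code):

    # Inventory as reported below for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 329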
[ 1667.963531] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f768eca3-1fdc-485d-b6cb-4318ce2e1a3a tempest-ServersV294TestFqdnHostnames-231693197 tempest-ServersV294TestFqdnHostnames-231693197-project-member] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.964388] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 385.773s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.964574] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: d79207a6-43e0-474a-9c61-8a71a86da7a0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1667.965045] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "d79207a6-43e0-474a-9c61-8a71a86da7a0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.102728] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccedfa7-af91-4888-af3e-420674eafdad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.110450] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cf8c93-56df-43fd-a7ca-a05b48f92a12 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.140508] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2107fc8d-6a5e-473b-8b04-367f644dbfe6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.147683] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b7d404-50f8-483f-8eaa-5c5738f8d0db {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.160221] env[61473]: DEBUG nova.compute.provider_tree [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.168868] env[61473]: DEBUG nova.scheduler.client.report [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 
'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1668.186693] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.330s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.187201] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1668.222029] env[61473]: DEBUG nova.compute.utils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1668.222801] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1668.223094] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1668.231713] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1668.293272] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1668.297883] env[61473]: DEBUG nova.policy [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdcc5c977bc54b2d91b1e869ba75aadb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0d85a8f50fa45a5b7f1c6ace8aed6ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1668.323057] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1668.323303] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1668.323462] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.323645] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1668.323790] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.323937] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1668.324161] env[61473]: 
DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1668.324323] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1668.324492] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1668.324690] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1668.324829] env[61473]: DEBUG nova.virt.hardware [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1668.325679] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a0c54e-6a08-4c0d-a386-26c2d487d47c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.333992] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf34537-6560-40f0-be5b-5a0c2c84cf12 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.665191] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Successfully created port: 2ee0e087-76a5-4be1-8bea-58d2c7a6a574 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1669.395698] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Successfully updated port: 2ee0e087-76a5-4be1-8bea-58d2c7a6a574 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1669.409365] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "refresh_cache-cbbd16ce-8cea-4d08-b672-99da04f148e4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.409365] env[61473]: DEBUG oslo_concurrency.lockutils [None 
req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquired lock "refresh_cache-cbbd16ce-8cea-4d08-b672-99da04f148e4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.409365] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1669.478413] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1669.689636] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Updating instance_info_cache with network_info: [{"id": "2ee0e087-76a5-4be1-8bea-58d2c7a6a574", "address": "fa:16:3e:6d:d0:e1", "network": {"id": "f72532e6-9e15-4a0a-8f0e-b0ab1a4ee6df", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1277087048-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0d85a8f50fa45a5b7f1c6ace8aed6ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bb798a9-4c7f-4361-9436-a5690283861a", "external-id": "nsx-vlan-transportzone-547", "segmentation_id": 547, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee0e087-76", "ovs_interfaceid": "2ee0e087-76a5-4be1-8bea-58d2c7a6a574", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.697891] env[61473]: DEBUG nova.compute.manager [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Received event network-vif-plugged-2ee0e087-76a5-4be1-8bea-58d2c7a6a574 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1669.698114] env[61473]: DEBUG oslo_concurrency.lockutils [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] Acquiring lock "cbbd16ce-8cea-4d08-b672-99da04f148e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.698324] env[61473]: DEBUG oslo_concurrency.lockutils [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service 
nova] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.698501] env[61473]: DEBUG oslo_concurrency.lockutils [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.698671] env[61473]: DEBUG nova.compute.manager [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] No waiting events found dispatching network-vif-plugged-2ee0e087-76a5-4be1-8bea-58d2c7a6a574 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1669.698833] env[61473]: WARNING nova.compute.manager [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Received unexpected event network-vif-plugged-2ee0e087-76a5-4be1-8bea-58d2c7a6a574 for instance with vm_state building and task_state spawning. [ 1669.698989] env[61473]: DEBUG nova.compute.manager [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Received event network-changed-2ee0e087-76a5-4be1-8bea-58d2c7a6a574 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1669.699479] env[61473]: DEBUG nova.compute.manager [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Refreshing instance network info cache due to event network-changed-2ee0e087-76a5-4be1-8bea-58d2c7a6a574. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1669.699479] env[61473]: DEBUG oslo_concurrency.lockutils [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] Acquiring lock "refresh_cache-cbbd16ce-8cea-4d08-b672-99da04f148e4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.705579] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Releasing lock "refresh_cache-cbbd16ce-8cea-4d08-b672-99da04f148e4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.705852] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Instance network_info: |[{"id": "2ee0e087-76a5-4be1-8bea-58d2c7a6a574", "address": "fa:16:3e:6d:d0:e1", "network": {"id": "f72532e6-9e15-4a0a-8f0e-b0ab1a4ee6df", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1277087048-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0d85a8f50fa45a5b7f1c6ace8aed6ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bb798a9-4c7f-4361-9436-a5690283861a", "external-id": "nsx-vlan-transportzone-547", "segmentation_id": 547, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee0e087-76", "ovs_interfaceid": "2ee0e087-76a5-4be1-8bea-58d2c7a6a574", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1669.706132] env[61473]: DEBUG oslo_concurrency.lockutils [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] Acquired lock "refresh_cache-cbbd16ce-8cea-4d08-b672-99da04f148e4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.706312] env[61473]: DEBUG nova.network.neutron [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Refreshing network info cache for port 2ee0e087-76a5-4be1-8bea-58d2c7a6a574 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1669.707365] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:d0:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bb798a9-4c7f-4361-9436-a5690283861a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'2ee0e087-76a5-4be1-8bea-58d2c7a6a574', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1669.715204] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Creating folder: Project (f0d85a8f50fa45a5b7f1c6ace8aed6ed). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1669.716238] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c26fd04-e29a-484a-868c-5d663233204e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.728771] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Created folder: Project (f0d85a8f50fa45a5b7f1c6ace8aed6ed) in parent group-v843485. [ 1669.729218] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Creating folder: Instances. Parent ref: group-v843576. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1669.729218] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf370665-60c4-45e5-bd0a-f18fe21f5cca {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.738982] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Created folder: Instances in parent group-v843576. [ 1669.739218] env[61473]: DEBUG oslo.service.loopingcall [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1669.739435] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1669.739660] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07a9554b-277b-4f61-9ce4-9882d3299936 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.758792] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1669.758792] env[61473]: value = "task-4281684" [ 1669.758792] env[61473]: _type = "Task" [ 1669.758792] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.766104] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281684, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.036386] env[61473]: DEBUG nova.network.neutron [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Updated VIF entry in instance network info cache for port 2ee0e087-76a5-4be1-8bea-58d2c7a6a574. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1670.036748] env[61473]: DEBUG nova.network.neutron [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Updating instance_info_cache with network_info: [{"id": "2ee0e087-76a5-4be1-8bea-58d2c7a6a574", "address": "fa:16:3e:6d:d0:e1", "network": {"id": "f72532e6-9e15-4a0a-8f0e-b0ab1a4ee6df", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1277087048-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0d85a8f50fa45a5b7f1c6ace8aed6ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bb798a9-4c7f-4361-9436-a5690283861a", "external-id": "nsx-vlan-transportzone-547", "segmentation_id": 547, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee0e087-76", "ovs_interfaceid": "2ee0e087-76a5-4be1-8bea-58d2c7a6a574", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.047673] env[61473]: DEBUG oslo_concurrency.lockutils [req-7f4b082e-8323-494b-a191-d0c1c660d536 req-3d7211e8-887d-4dac-9e9c-7fb52229e242 service nova] Releasing lock "refresh_cache-cbbd16ce-8cea-4d08-b672-99da04f148e4" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.268904] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281684, 'name': CreateVM_Task, 'duration_secs': 0.274813} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.269087] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1670.269824] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.269929] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.270258] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1670.270529] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-229ce252-b4b1-4029-a4cd-0a0e9dbe9c3a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.274635] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Waiting for the task: (returnval){ [ 1670.274635] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52fcc8e2-3efa-e5f5-065e-e0b6b5e8e9e3" [ 1670.274635] env[61473]: _type = "Task" [ 1670.274635] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.281541] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52fcc8e2-3efa-e5f5-065e-e0b6b5e8e9e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.784320] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.784608] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1670.784852] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.792208] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.792533] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.314522] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.441969] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "a6532eba-0297-4320-9357-165e482c3790" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.442320] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "a6532eba-0297-4320-9357-165e482c3790" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.859848] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.860187] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.966498] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1716.629151] env[61473]: WARNING oslo_vmware.rw_handles [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1716.629151] env[61473]: ERROR oslo_vmware.rw_handles [ 1716.629724] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1716.631617] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 
tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1716.631872] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Copying Virtual Disk [datastore2] vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/a89d337a-1db4-4f5b-aca0-41b3e810b5b5/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1716.632163] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2db2292-51db-4a97-adaa-547a18c890dd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.640014] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Waiting for the task: (returnval){ [ 1716.640014] env[61473]: value = "task-4281685" [ 1716.640014] env[61473]: _type = "Task" [ 1716.640014] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.647720] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Task: {'id': task-4281685, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.966109] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.150287] env[61473]: DEBUG oslo_vmware.exceptions [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1717.150594] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.151181] env[61473]: ERROR nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1717.151181] env[61473]: Faults: ['InvalidArgument'] [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Traceback (most recent call last): [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] yield resources [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self.driver.spawn(context, instance, image_meta, [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self._fetch_image_if_missing(context, vi) [ 1717.151181] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] image_cache(vi, tmp_image_ds_loc) [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] vm_util.copy_virtual_disk( [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] session._wait_for_task(vmdk_copy_task) [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] return self.wait_for_task(task_ref) [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] return evt.wait() [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] result = hub.switch() [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1717.151543] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] return self.greenlet.switch() [ 1717.151868] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1717.151868] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self.f(*self.args, **self.kw) [ 1717.151868] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1717.151868] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] raise exceptions.translate_fault(task_info.error) [ 1717.151868] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1717.151868] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Faults: ['InvalidArgument'] [ 1717.151868] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] [ 1717.151868] env[61473]: INFO nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Terminating instance [ 1717.153132] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.153340] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1717.153574] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a0c23be-d1b5-4d2a-b49b-2e17193b8e91 
{{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.157500] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1717.157698] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1717.158456] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c518f781-89ae-4e32-a13e-800bb45c0904 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.162515] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1717.162688] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1717.163675] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58efc5cf-acbf-4611-9fa7-447e6b47b3d9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.167546] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1717.168120] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf4fa620-3160-41ab-87e1-7fbeff71885b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.170353] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){
[ 1717.170353] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]526e276e-d2d7-3a24-1970-79fd87698680"
[ 1717.170353] env[61473]: _type = "Task"
[ 1717.170353] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1717.178830] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]526e276e-d2d7-3a24-1970-79fd87698680, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1717.237061] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1717.237296] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1717.237478] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Deleting the datastore file [datastore2] 7886aeef-40ea-45e5-afa4-d04ca469649e {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1717.237753] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1c2d3f1-4b81-4f2e-8683-b64945c55e4c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.245611] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Waiting for the task: (returnval){
[ 1717.245611] env[61473]: value = "task-4281687"
[ 1717.245611] env[61473]: _type = "Task"
[ 1717.245611] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1717.254525] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Task: {'id': task-4281687, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1717.680962] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1717.681256] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating directory with path [datastore2] vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1717.681478] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9506e955-2fa6-4e5b-9756-5152789f21f4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.692686] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created directory with path [datastore2] vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1717.692893] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Fetch image to [datastore2] vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1717.693086] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1717.693827] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1332192b-44af-411f-af6f-34168c80f69a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.700188] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3f4911-450d-4f48-ade4-6e88d5086994 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.709081] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fad3015-dc61-4bf3-9b5e-3cd260d1142c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.738742] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a34a41c-3d6b-4d93-a1de-57a431ab1c8a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.744158] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6f9e0561-80d1-455d-b883-1a6798ff86dc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1717.752873] env[61473]: DEBUG oslo_vmware.api [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Task: {'id': task-4281687, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065726} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1717.753128] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1717.753312] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1717.753527] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1717.753737] env[61473]: INFO nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 1717.756005] env[61473]: DEBUG nova.compute.claims [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1717.756190] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1717.756436] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1717.765174] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1717.812928] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1717.878048] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1717.878048] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1717.965573] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1717.965794] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1717.965941] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}}
[ 1717.966109] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1717.977225] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1718.053052] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73297dd-21be-4d85-95f0-dbe35207fe19 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.060790] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c093be-d9da-4a3e-8fda-f6779016553b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.091478] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a1baf7-1512-4834-87ec-3c80b08d5c05 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.098523] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e318afc7-a842-4001-817c-3fa5fe4bc8ad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.112067] env[61473]: DEBUG nova.compute.provider_tree [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1718.121367] env[61473]: DEBUG nova.scheduler.client.report [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1718.137710] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.381s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.138297] env[61473]: ERROR nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1718.138297] env[61473]: Faults: ['InvalidArgument']
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Traceback (most recent call last):
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self.driver.spawn(context, instance, image_meta,
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self._fetch_image_if_missing(context, vi)
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] image_cache(vi, tmp_image_ds_loc)
[ 1718.138297] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] vm_util.copy_virtual_disk(
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] session._wait_for_task(vmdk_copy_task)
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] return self.wait_for_task(task_ref)
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] return evt.wait()
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] result = hub.switch()
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] return self.greenlet.switch()
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1718.138639] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] self.f(*self.args, **self.kw)
[ 1718.138951] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1718.138951] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] raise exceptions.translate_fault(task_info.error)
[ 1718.138951] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1718.138951] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Faults: ['InvalidArgument']
[ 1718.138951] env[61473]: ERROR nova.compute.manager [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e]
[ 1718.139097] env[61473]: DEBUG nova.compute.utils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1718.140124] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.163s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1718.140310] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.140461] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1718.141181] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Build of instance 7886aeef-40ea-45e5-afa4-d04ca469649e was re-scheduled: A specified parameter was not correct: fileType
[ 1718.141181] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}}
[ 1718.141643] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}}
[ 1718.141873] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}}
[ 1718.142089] env[61473]: DEBUG nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1718.142276] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1718.144462] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed3742f-a590-4bed-a36c-325fff336cc7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.152986] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b4df99-e969-4746-acd6-3f285048a2dd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.166373] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c9dafb-4ed5-4ccb-81e2-39cc17f85b53 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.172495] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dabb25-ab56-4876-ae7a-09e801e3e294 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.204117] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180621MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1718.204117] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1718.204360] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1718.289707] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7886aeef-40ea-45e5-afa4-d04ca469649e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.289707] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 0367d64d-76f3-4483-bc17-77cd900569ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.289895] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.289895] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.289994] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.290107] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.290200] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.290315] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.290425] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.290622] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1718.301280] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.310902] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7c9630f0-b868-4029-a841-4569d984fc5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.320578] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 16e08841-5bb0-4d57-800c-ef036946acf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.330711] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance d5b8290a-2dd2-4a49-ba0d-1b88a5940833 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.340551] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.350286] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.360149] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1718.360376] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1718.360544] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=183GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1718.583046] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72f0574-c26d-424c-8ab2-5c6fc2fceaa4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.590534] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495738aa-9e6e-4070-a013-55c771c725b3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.619890] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4a7b87-e0bf-4941-a770-c38f01313fcb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.628062] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a63b79a-454d-4fd7-b4d5-224776222edc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.646102] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1718.656395] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1718.663832] env[61473]: DEBUG nova.network.neutron [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1718.673199] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1718.673604] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.469s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.674259] env[61473]: INFO nova.compute.manager [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Took 0.53 seconds to deallocate network for instance.
[ 1718.766502] env[61473]: INFO nova.scheduler.client.report [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Deleted allocations for instance 7886aeef-40ea-45e5-afa4-d04ca469649e
[ 1718.788662] env[61473]: DEBUG oslo_concurrency.lockutils [None req-46692d2a-710c-460e-a1d1-4122e79e835c tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 631.458s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.789849] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 436.598s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1718.790105] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] During sync_power_state the instance has a pending task (spawning). Skip.
[ 1718.790312] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.790956] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 435.123s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1718.791297] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Acquiring lock "7886aeef-40ea-45e5-afa4-d04ca469649e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1718.791521] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1718.791689] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.793584] env[61473]: INFO nova.compute.manager [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Terminating instance
[ 1718.795828] env[61473]: DEBUG nova.compute.manager [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1718.795828] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1718.796086] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-268cf939-c5d4-40ca-bda3-57df2d408de5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.806215] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a266a3-fa25-4116-88d1-4c7b860dde47 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1718.817362] env[61473]: DEBUG nova.compute.manager [None req-8f788906-a7c3-469a-a939-a4444699c6b5 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: 84f08516-611c-4455-950b-b332d854e939] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1718.837097] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7886aeef-40ea-45e5-afa4-d04ca469649e could not be found.
[ 1718.837300] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1718.837483] env[61473]: INFO nova.compute.manager [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1718.837760] env[61473]: DEBUG oslo.service.loopingcall [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1718.837990] env[61473]: DEBUG nova.compute.manager [-] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 1718.838106] env[61473]: DEBUG nova.network.neutron [-] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1718.848176] env[61473]: DEBUG nova.compute.manager [None req-8f788906-a7c3-469a-a939-a4444699c6b5 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: 84f08516-611c-4455-950b-b332d854e939] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}}
[ 1718.867771] env[61473]: DEBUG nova.network.neutron [-] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1718.874861] env[61473]: INFO nova.compute.manager [-] [instance: 7886aeef-40ea-45e5-afa4-d04ca469649e] Took 0.04 seconds to deallocate network for instance.
[ 1718.876011] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8f788906-a7c3-469a-a939-a4444699c6b5 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "84f08516-611c-4455-950b-b332d854e939" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.391s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.890278] env[61473]: DEBUG nova.compute.manager [None req-f634feab-d268-4279-b9fb-c6608ec6a180 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 605fba76-9c25-4a0d-8e4e-0c7672c3a841] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1718.928331] env[61473]: DEBUG nova.compute.manager [None req-f634feab-d268-4279-b9fb-c6608ec6a180 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] [instance: 605fba76-9c25-4a0d-8e4e-0c7672c3a841] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}}
[ 1718.950079] env[61473]: DEBUG oslo_concurrency.lockutils [None req-f634feab-d268-4279-b9fb-c6608ec6a180 tempest-ServerDiskConfigTestJSON-1779168041 tempest-ServerDiskConfigTestJSON-1779168041-project-member] Lock "605fba76-9c25-4a0d-8e4e-0c7672c3a841" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 205.394s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1718.959335] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}}
[ 1719.012759] env[61473]: DEBUG oslo_concurrency.lockutils [None req-4fb17e19-f853-4917-b215-30175640e186 tempest-ServerMetadataTestJSON-1476780501 tempest-ServerMetadataTestJSON-1476780501-project-member] Lock "7886aeef-40ea-45e5-afa4-d04ca469649e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.222s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1719.018746] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1719.018987] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1719.020599] env[61473]: INFO nova.compute.claims [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1719.248148] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96a0cb9-f021-4c75-8dca-83d7d4420b49 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1719.256869] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73e0f75-8813-4391-a4d4-33b6cf5cbb73 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1719.285546] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e711ab34-35a6-4746-a05a-791084d72b4c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1719.293746] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa765e5-6dd4-4d83-83c7-68453fb4ac4d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1719.308027] env[61473]: DEBUG nova.compute.provider_tree [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1719.315048] env[61473]: DEBUG nova.scheduler.client.report [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1719.328210] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.309s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1719.328709] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}}
[ 1719.361088] env[61473]: DEBUG nova.compute.utils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1719.363093] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 1719.363293] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1719.372782] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}}
[ 1719.430603] env[61473]: DEBUG nova.policy [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f73eee97d58843319711ca37f58ca7da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b4b83613836742da946102e5a4e4a74f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1719.440486] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}}
[ 1719.475235] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=<?>,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T02:07:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1719.475483] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1719.475647] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1719.475852] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1719.476020] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1719.476173] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1719.476382] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1719.476568] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1719.476765] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1719.476935] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1719.477147] env[61473]: DEBUG nova.virt.hardware [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1719.478280] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d369753-86b7-4adc-b4aa-92fd0f3e4b28 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1719.485823] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7180a16-3726-484b-ab0b-25412377a6ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1719.674034] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1719.674251] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 1719.674350] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}}
[ 1719.697749] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.697907] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.698182] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699123] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699123] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699123] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699123] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699123] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699332] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699332] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1719.699332] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}}
[ 1719.699726] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1720.089421] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Successfully created port: c99d3fa8-ec60-4d38-9365-fee9f60ad516 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1720.683107] env[61473]: DEBUG nova.compute.manager [req-0717a22c-2953-499f-805d-84362ba3b816 req-26e28c80-e825-45d1-9670-ae8ac257de3a service nova] [instance: 19154895-863b-4468-8737-32105f98528b] Received event network-vif-plugged-c99d3fa8-ec60-4d38-9365-fee9f60ad516 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1720.683373] env[61473]: DEBUG oslo_concurrency.lockutils [req-0717a22c-2953-499f-805d-84362ba3b816 req-26e28c80-e825-45d1-9670-ae8ac257de3a service nova] Acquiring lock "19154895-863b-4468-8737-32105f98528b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1720.683733] env[61473]: DEBUG oslo_concurrency.lockutils [req-0717a22c-2953-499f-805d-84362ba3b816 req-26e28c80-e825-45d1-9670-ae8ac257de3a service nova] Lock "19154895-863b-4468-8737-32105f98528b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1720.683809] env[61473]: DEBUG oslo_concurrency.lockutils [req-0717a22c-2953-499f-805d-84362ba3b816 req-26e28c80-e825-45d1-9670-ae8ac257de3a service nova] Lock "19154895-863b-4468-8737-32105f98528b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1720.684027] env[61473]: DEBUG nova.compute.manager [req-0717a22c-2953-499f-805d-84362ba3b816 req-26e28c80-e825-45d1-9670-ae8ac257de3a service nova] [instance: 19154895-863b-4468-8737-32105f98528b] No waiting events found dispatching network-vif-plugged-c99d3fa8-ec60-4d38-9365-fee9f60ad516 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1720.684932] env[61473]: WARNING nova.compute.manager [req-0717a22c-2953-499f-805d-84362ba3b816 req-26e28c80-e825-45d1-9670-ae8ac257de3a service nova] [instance: 19154895-863b-4468-8737-32105f98528b] Received unexpected event network-vif-plugged-c99d3fa8-ec60-4d38-9365-fee9f60ad516 for instance with vm_state building and task_state spawning.
[ 1720.764709] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Successfully updated port: c99d3fa8-ec60-4d38-9365-fee9f60ad516 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1720.775607] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "refresh_cache-19154895-863b-4468-8737-32105f98528b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.775750] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquired lock "refresh_cache-19154895-863b-4468-8737-32105f98528b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.775898] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1720.816939] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1720.987730] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.011908] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Updating instance_info_cache with network_info: [{"id": "c99d3fa8-ec60-4d38-9365-fee9f60ad516", "address": "fa:16:3e:0c:a7:f5", "network": {"id": "3e126d7b-02e6-4f97-9bab-142f2e66da0f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-816586849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4b83613836742da946102e5a4e4a74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc99d3fa8-ec", "ovs_interfaceid": "c99d3fa8-ec60-4d38-9365-fee9f60ad516", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1721.025492] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Releasing lock "refresh_cache-19154895-863b-4468-8737-32105f98528b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.025759] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Instance network_info: |[{"id": "c99d3fa8-ec60-4d38-9365-fee9f60ad516", "address": "fa:16:3e:0c:a7:f5", "network": {"id": "3e126d7b-02e6-4f97-9bab-142f2e66da0f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-816586849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4b83613836742da946102e5a4e4a74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapc99d3fa8-ec", "ovs_interfaceid": "c99d3fa8-ec60-4d38-9365-fee9f60ad516", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1721.026393] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:a7:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c99d3fa8-ec60-4d38-9365-fee9f60ad516', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1721.037846] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Creating folder: Project (b4b83613836742da946102e5a4e4a74f). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1721.037846] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e98aa04-deb8-4bf7-9106-6779b2f2f260 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.048144] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Created folder: Project (b4b83613836742da946102e5a4e4a74f) in parent group-v843485. [ 1721.048332] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Creating folder: Instances. Parent ref: group-v843579. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1721.048568] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8aeb0649-8a8a-40c4-9715-3bf25ac988e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.057621] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Created folder: Instances in parent group-v843579. [ 1721.057801] env[61473]: DEBUG oslo.service.loopingcall [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1721.057986] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19154895-863b-4468-8737-32105f98528b] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1721.058203] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c282cf1-e00e-440c-8062-133788e8d604 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.078398] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1721.078398] env[61473]: value = "task-4281690" [ 1721.078398] env[61473]: _type = "Task" [ 1721.078398] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.089385] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281690, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.588254] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281690, 'name': CreateVM_Task, 'duration_secs': 0.28515} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.588513] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19154895-863b-4468-8737-32105f98528b] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1721.589135] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.589308] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.589618] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1721.589859] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a283b23-21b4-401a-911b-51479369457e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.594247] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Waiting for the task: (returnval){ [ 1721.594247] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5200f208-7520-02c6-a1d6-38f81b0ba5f9" [ 1721.594247] env[61473]: _type = "Task" [ 1721.594247] 
env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.601312] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5200f208-7520-02c6-a1d6-38f81b0ba5f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.965892] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1722.104325] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.104558] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1722.104745] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.709041] env[61473]: DEBUG nova.compute.manager [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] [instance: 19154895-863b-4468-8737-32105f98528b] Received event network-changed-c99d3fa8-ec60-4d38-9365-fee9f60ad516 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1722.709289] env[61473]: DEBUG nova.compute.manager [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] [instance: 19154895-863b-4468-8737-32105f98528b] Refreshing instance network info cache due to event network-changed-c99d3fa8-ec60-4d38-9365-fee9f60ad516. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1722.709474] env[61473]: DEBUG oslo_concurrency.lockutils [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] Acquiring lock "refresh_cache-19154895-863b-4468-8737-32105f98528b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.709617] env[61473]: DEBUG oslo_concurrency.lockutils [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] Acquired lock "refresh_cache-19154895-863b-4468-8737-32105f98528b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.709832] env[61473]: DEBUG nova.network.neutron [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] [instance: 19154895-863b-4468-8737-32105f98528b] Refreshing network info cache for port c99d3fa8-ec60-4d38-9365-fee9f60ad516 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1722.966334] env[61473]: DEBUG nova.network.neutron [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] [instance: 19154895-863b-4468-8737-32105f98528b] Updated VIF entry in instance network info cache for port c99d3fa8-ec60-4d38-9365-fee9f60ad516. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1722.966697] env[61473]: DEBUG nova.network.neutron [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] [instance: 19154895-863b-4468-8737-32105f98528b] Updating instance_info_cache with network_info: [{"id": "c99d3fa8-ec60-4d38-9365-fee9f60ad516", "address": "fa:16:3e:0c:a7:f5", "network": {"id": "3e126d7b-02e6-4f97-9bab-142f2e66da0f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-816586849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b4b83613836742da946102e5a4e4a74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc99d3fa8-ec", "ovs_interfaceid": "c99d3fa8-ec60-4d38-9365-fee9f60ad516", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.977221] env[61473]: DEBUG oslo_concurrency.lockutils [req-94e74bcd-eade-4c1a-b200-b17d36112115 req-a99002b8-a827-4ee6-a0a4-2b136d7a930b service nova] Releasing lock "refresh_cache-19154895-863b-4468-8737-32105f98528b" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.984619] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] 
Acquiring lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.984946] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.961761] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.782496] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "19154895-863b-4468-8737-32105f98528b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.486112] env[61473]: WARNING oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1765.486112] env[61473]: ERROR oslo_vmware.rw_handles [ 1765.486112] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 
1765.488297] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1765.488546] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Copying Virtual Disk [datastore2] vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/405a6913-6a7b-4ee5-907c-8f76a6a1cf1f/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1765.488845] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96de158d-9ed7-4091-9872-08b3011df8ad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.498349] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1765.498349] env[61473]: value = "task-4281691" [ 1765.498349] env[61473]: _type = "Task" [ 1765.498349] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.509378] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281691, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.583996] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.584255] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.008864] env[61473]: DEBUG oslo_vmware.exceptions [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1766.009218] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.009801] env[61473]: ERROR nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1766.009801] env[61473]: Faults: ['InvalidArgument'] [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Traceback (most recent call last): [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] yield resources [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self.driver.spawn(context, instance, image_meta, [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self._fetch_image_if_missing(context, vi) [ 1766.009801] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] image_cache(vi, tmp_image_ds_loc) [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] vm_util.copy_virtual_disk( [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] session._wait_for_task(vmdk_copy_task) [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] return self.wait_for_task(task_ref) [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] return evt.wait() [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] result = hub.switch() [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1766.010166] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] return self.greenlet.switch() [ 1766.010563] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1766.010563] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self.f(*self.args, **self.kw) [ 1766.010563] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1766.010563] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] raise exceptions.translate_fault(task_info.error) [ 1766.010563] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1766.010563] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Faults: ['InvalidArgument'] [ 1766.010563] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] [ 1766.010563] env[61473]: INFO nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Terminating instance [ 1766.011765] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.012019] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1766.012290] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63d5513a-b64f-4757-a6cd-7deb5ac6e4ea 
{{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.014483] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1766.014672] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1766.015403] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798df838-8f14-4ad4-9e07-4307a190d99f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.023917] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1766.024185] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e67d579-f7f2-4d7e-a0a4-bf4c2e7ba2da {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.026345] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1766.026522] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1766.027521] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b6b6d13-4f5a-4abd-891f-996b9df96937 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.032231] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1766.032231] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]526c67c4-6253-1bcc-5020-8232fc260fa1" [ 1766.032231] env[61473]: _type = "Task" [ 1766.032231] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.039557] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]526c67c4-6253-1bcc-5020-8232fc260fa1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.094436] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1766.094626] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1766.094778] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleting the datastore file [datastore2] 0367d64d-76f3-4483-bc17-77cd900569ef {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1766.095062] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0376a181-385b-4bc9-9d6b-48158e186d67 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.101561] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1766.101561] env[61473]: value = "task-4281693" [ 1766.101561] env[61473]: _type = "Task" [ 1766.101561] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.109305] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281693, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.543058] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1766.543058] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating directory with path [datastore2] vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1766.543058] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02fdb45e-bf88-4a2b-94fd-9ce32fee0b7e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.553929] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Created directory with path [datastore2] vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1766.554132] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Fetch image to [datastore2] vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1766.554314] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1766.555040] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39f11e2-dd96-467d-901c-7fd9977d624a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.561270] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9127ff29-016f-4808-a527-84ea389ad0df {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.569953] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3eb7c11-3ad5-46f7-b486-8b49492d3a05 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.599080] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc0e34d-58cb-4239-b006-ace4941e969e 
{{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.606906] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9b22060d-7d8c-4f31-9eaf-478369bd9e30 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.611204] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281693, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072202} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.611770] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1766.611948] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1766.612161] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1766.612334] env[61473]: INFO nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1766.614318] env[61473]: DEBUG nova.compute.claims [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1766.614482] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.614692] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.637471] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1766.690603] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1766.754754] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1766.754754] env[61473]: DEBUG oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1766.898676] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67477be7-ea30-4936-9678-58fb4443b18b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1766.905829] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77088be9-899a-443e-b644-a5402500ce90 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1766.934943] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58eb1c06-9745-44b8-944b-f7a43a09837d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1766.942562] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cf0aeb-6411-4540-a8a6-a99b3bce1fd4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1766.955325] env[61473]: DEBUG nova.compute.provider_tree [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1766.964323] env[61473]: DEBUG nova.scheduler.client.report [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1766.978014] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1766.978557] env[61473]: ERROR nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1766.978557] env[61473]: Faults: ['InvalidArgument']
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Traceback (most recent call last):
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self.driver.spawn(context, instance, image_meta,
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self._fetch_image_if_missing(context, vi)
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] image_cache(vi, tmp_image_ds_loc)
[ 1766.978557] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] vm_util.copy_virtual_disk(
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] session._wait_for_task(vmdk_copy_task)
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] return self.wait_for_task(task_ref)
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] return evt.wait()
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] result = hub.switch()
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] return self.greenlet.switch()
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1766.979072] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] self.f(*self.args, **self.kw)
[ 1766.979436] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1766.979436] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] raise exceptions.translate_fault(task_info.error)
[ 1766.979436] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1766.979436] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Faults: ['InvalidArgument']
[ 1766.979436] env[61473]: ERROR nova.compute.manager [instance: 0367d64d-76f3-4483-bc17-77cd900569ef]
[ 1766.979436] env[61473]: DEBUG nova.compute.utils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1766.981979] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Build of instance 0367d64d-76f3-4483-bc17-77cd900569ef was re-scheduled: A specified parameter was not correct: fileType
[ 1766.981979] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}}
[ 1766.982366] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}}
[ 1766.982546] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1766.982714] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1766.982876] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1767.293593] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.304660] env[61473]: INFO nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Took 0.32 seconds to deallocate network for instance. [ 1767.398192] env[61473]: INFO nova.scheduler.client.report [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleted allocations for instance 0367d64d-76f3-4483-bc17-77cd900569ef [ 1767.423245] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "0367d64d-76f3-4483-bc17-77cd900569ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.563s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.424433] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "0367d64d-76f3-4483-bc17-77cd900569ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.600s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.424717] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "0367d64d-76f3-4483-bc17-77cd900569ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.424941] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "0367d64d-76f3-4483-bc17-77cd900569ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.425132] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "0367d64d-76f3-4483-bc17-77cd900569ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.427093] env[61473]: INFO nova.compute.manager [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Terminating instance [ 1767.428730] env[61473]: DEBUG nova.compute.manager [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1767.428930] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1767.429452] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56bd9ca0-ce0c-499b-bb9e-7d8e0d65b464 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.441205] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa8b958-3888-49b7-9b6b-ca35be3972f7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.453177] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 7c9630f0-b868-4029-a841-4569d984fc5e] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1767.474315] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0367d64d-76f3-4483-bc17-77cd900569ef could not be found. [ 1767.474720] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1767.475051] env[61473]: INFO nova.compute.manager [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Took 0.05 seconds to destroy the instance on the hypervisor. 
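
The traceback above bottoms out in oslo.vmware's task polling: nova's copy_virtual_disk submits a vCenter copy task, wait_for_task re-reads the task's info from a looping call, and a task that finishes in the 'error' state is translated (exceptions.translate_fault) into the VimFaultException seen here, carrying the message "A specified parameter was not correct: fileType" and the fault list ['InvalidArgument']. The same contract produces the "Task: {'id': task-...} progress is N%" entries later in this log. The sketch below is a minimal, self-contained rendition of that poll-and-translate loop; the TaskInfo dataclass, the poll_task_info callable, and the interval parameter are illustrative stand-ins for this note, not oslo.vmware's real types or signatures.

    # Illustrative sketch of the task-polling contract behind wait_for_task /
    # _poll_task in the traceback above. TaskInfo is a stub standing in for the
    # vSphere TaskInfo managed object; it is NOT oslo.vmware's actual API.
    import time
    from dataclasses import dataclass, field

    @dataclass
    class TaskInfo:                      # stub: 'queued' | 'running' | 'success' | 'error'
        state: str
        progress: int = 0
        error_msg: str = ""
        faults: list = field(default_factory=list)

    class VimFaultException(Exception):  # simplified stand-in for oslo_vmware.exceptions
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        """Poll a vCenter-style task until it finishes; raise on task error.

        poll_task_info is any callable returning a fresh TaskInfo snapshot,
        mirroring how the real looping call re-reads the task's info property
        on every tick.
        """
        while True:
            info = poll_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # The real code raises exceptions.translate_fault(task_info.error);
                # the effect is the same: the task fault becomes a raised exception.
                raise VimFaultException(info.faults, info.error_msg)
            time.sleep(interval)         # 'queued' / 'running': keep polling

    # Replaying the failure recorded in the log above with canned snapshots:
    snapshots = iter([
        TaskInfo(state="running", progress=40),
        TaskInfo(state="error",
                 error_msg="A specified parameter was not correct: fileType",
                 faults=["InvalidArgument"]),
    ])
    try:
        wait_for_task(lambda: next(snapshots))
    except VimFaultException as exc:
        print(exc, exc.fault_list)       # matches the traceback's message and faults

Because _build_and_run_instance treats the raised fault as a build failure, the log immediately records the compute_resources claim being aborted, the instance being re-scheduled, and its network being deallocated, which is exactly the sequence visible above and below this point.
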
[ 1767.475446] env[61473]: DEBUG oslo.service.loopingcall [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1767.476461] env[61473]: DEBUG nova.compute.manager [-] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1767.476461] env[61473]: DEBUG nova.network.neutron [-] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1767.478244] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 7c9630f0-b868-4029-a841-4569d984fc5e] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1767.500056] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "7c9630f0-b868-4029-a841-4569d984fc5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.931s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.501527] env[61473]: DEBUG nova.network.neutron [-] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.508107] env[61473]: INFO nova.compute.manager [-] [instance: 0367d64d-76f3-4483-bc17-77cd900569ef] Took 0.03 seconds to deallocate network for instance. [ 1767.509916] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 16e08841-5bb0-4d57-800c-ef036946acf9] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1767.532050] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 16e08841-5bb0-4d57-800c-ef036946acf9] Instance disappeared before build. 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1767.551615] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "16e08841-5bb0-4d57-800c-ef036946acf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.954s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.561587] env[61473]: DEBUG nova.compute.manager [None req-81597bd6-9f5a-4cc3-bc86-bb529ec2c831 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: d5b8290a-2dd2-4a49-ba0d-1b88a5940833] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1767.604558] env[61473]: DEBUG nova.compute.manager [None req-81597bd6-9f5a-4cc3-bc86-bb529ec2c831 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: d5b8290a-2dd2-4a49-ba0d-1b88a5940833] Instance disappeared before build. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2435}} [ 1767.626151] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9215e7d6-046b-48e7-a076-8c6a1d20a873 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "0367d64d-76f3-4483-bc17-77cd900569ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.202s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.638011] env[61473]: DEBUG oslo_concurrency.lockutils [None req-81597bd6-9f5a-4cc3-bc86-bb529ec2c831 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "d5b8290a-2dd2-4a49-ba0d-1b88a5940833" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.855s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.646787] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1767.695415] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.695662] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.697092] env[61473]: INFO nova.compute.claims [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1767.920388] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00893f4b-61f4-4544-8705-5c32378f46e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.927959] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc314f47-e26b-4f8a-a79d-7b5019c051ad {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.958960] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa5f9ed-ec1c-44c2-9f8d-7a8722ba3ab1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.966225] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe02f32e-7d46-42ee-b82c-0a00a566c1e7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.980490] env[61473]: DEBUG nova.compute.provider_tree [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1767.993032] env[61473]: DEBUG nova.scheduler.client.report [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1768.007929] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 
tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.312s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.008518] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1768.045479] env[61473]: DEBUG nova.compute.utils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1768.046746] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1768.046921] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1768.057349] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1768.113372] env[61473]: DEBUG nova.policy [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '679a463d26e64b3c8b61617fe97abf2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '930bd6995c2a4a6d8b2f760d584e21bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1768.118146] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1768.143835] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1768.144082] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1768.144244] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1768.144424] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1768.144572] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1768.144721] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1768.144927] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1768.145097] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1768.145265] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 
tempest-ImagesTestJSON-1599981621-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1768.145427] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1768.145599] env[61473]: DEBUG nova.virt.hardware [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1768.146463] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79ca582-f9a4-4d8f-9bf2-f272fe603b46 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.154353] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9f049a-016b-475e-abd6-9b855604fa4e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.423868] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Successfully created port: 4738d7e1-e603-48ed-ab51-3a896ade3c79 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1769.136079] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Successfully updated port: 4738d7e1-e603-48ed-ab51-3a896ade3c79 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1769.156152] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "refresh_cache-6b12b76a-d5a3-4a60-98e6-b0329389ca75" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.156365] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "refresh_cache-6b12b76a-d5a3-4a60-98e6-b0329389ca75" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.156533] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1769.231797] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1769.336399] env[61473]: DEBUG nova.compute.manager [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Received event network-vif-plugged-4738d7e1-e603-48ed-ab51-3a896ade3c79 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1769.336513] env[61473]: DEBUG oslo_concurrency.lockutils [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] Acquiring lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.336676] env[61473]: DEBUG oslo_concurrency.lockutils [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.336883] env[61473]: DEBUG oslo_concurrency.lockutils [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.337013] env[61473]: DEBUG nova.compute.manager [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] No waiting events found dispatching network-vif-plugged-4738d7e1-e603-48ed-ab51-3a896ade3c79 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1769.337185] env[61473]: WARNING nova.compute.manager [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Received unexpected event network-vif-plugged-4738d7e1-e603-48ed-ab51-3a896ade3c79 for instance with vm_state building and task_state spawning. [ 1769.337333] env[61473]: DEBUG nova.compute.manager [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Received event network-changed-4738d7e1-e603-48ed-ab51-3a896ade3c79 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1769.337486] env[61473]: DEBUG nova.compute.manager [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Refreshing instance network info cache due to event network-changed-4738d7e1-e603-48ed-ab51-3a896ade3c79. 
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1769.337647] env[61473]: DEBUG oslo_concurrency.lockutils [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] Acquiring lock "refresh_cache-6b12b76a-d5a3-4a60-98e6-b0329389ca75" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.412910] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Updating instance_info_cache with network_info: [{"id": "4738d7e1-e603-48ed-ab51-3a896ade3c79", "address": "fa:16:3e:e3:08:98", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4738d7e1-e6", "ovs_interfaceid": "4738d7e1-e603-48ed-ab51-3a896ade3c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.428846] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "refresh_cache-6b12b76a-d5a3-4a60-98e6-b0329389ca75" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.429146] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Instance network_info: |[{"id": "4738d7e1-e603-48ed-ab51-3a896ade3c79", "address": "fa:16:3e:e3:08:98", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4738d7e1-e6", 
"ovs_interfaceid": "4738d7e1-e603-48ed-ab51-3a896ade3c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1769.429441] env[61473]: DEBUG oslo_concurrency.lockutils [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] Acquired lock "refresh_cache-6b12b76a-d5a3-4a60-98e6-b0329389ca75" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.429617] env[61473]: DEBUG nova.network.neutron [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Refreshing network info cache for port 4738d7e1-e603-48ed-ab51-3a896ade3c79 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1769.430782] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:08:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4738d7e1-e603-48ed-ab51-3a896ade3c79', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1769.438361] env[61473]: DEBUG oslo.service.loopingcall [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1769.441070] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1769.441526] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c1f301b-28b0-426f-9838-84331995b650 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.463310] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1769.463310] env[61473]: value = "task-4281694" [ 1769.463310] env[61473]: _type = "Task" [ 1769.463310] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.470857] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281694, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.765555] env[61473]: DEBUG nova.network.neutron [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Updated VIF entry in instance network info cache for port 4738d7e1-e603-48ed-ab51-3a896ade3c79. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1769.765919] env[61473]: DEBUG nova.network.neutron [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Updating instance_info_cache with network_info: [{"id": "4738d7e1-e603-48ed-ab51-3a896ade3c79", "address": "fa:16:3e:e3:08:98", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4738d7e1-e6", "ovs_interfaceid": "4738d7e1-e603-48ed-ab51-3a896ade3c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.775850] env[61473]: DEBUG oslo_concurrency.lockutils [req-6dfafc9b-ce84-4e87-914d-c7355a059cf2 req-46e982c1-6f17-4c12-a85d-08feff643b5d service nova] Releasing lock "refresh_cache-6b12b76a-d5a3-4a60-98e6-b0329389ca75" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.972746] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281694, 'name': CreateVM_Task} progress is 99%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.473103] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281694, 'name': CreateVM_Task} progress is 99%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.974051] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281694, 'name': CreateVM_Task, 'duration_secs': 1.326415} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.974252] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1770.974916] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.975106] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.975420] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1770.975663] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85e882fe-c761-4eec-9dc3-06f537c71cd4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.979753] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1770.979753] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]525d2ce9-aa5f-b7d6-e8f5-29da3d605b99" [ 1770.979753] env[61473]: _type = "Task" [ 1770.979753] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.986776] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]525d2ce9-aa5f-b7d6-e8f5-29da3d605b99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.490375] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.490733] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1771.490853] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.966608] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.966158] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.966402] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.966580] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.966733] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1778.966762] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.966647] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.966834] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1779.967088] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1779.989923] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.990183] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.990331] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.990466] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.990592] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.990716] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.990840] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.990958] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.991089] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.991212] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1779.991331] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1779.991912] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1780.002724] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.002854] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.002972] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.003136] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1780.004710] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02ab752-2141-40a7-a469-f0da73887e66 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.013015] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5fe612-77f5-4d34-8b62-37a1affb3864 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.026708] env[61473]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925a32ee-ebdb-4844-8db8-c577f4d057e2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.032847] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0272de-4619-4b66-9bb3-2f90cdc8b9c0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.062330] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180649MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1780.062478] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.062668] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.141008] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a650e57a-85cf-416c-8787-a4ab98d4a930 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.141283] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.141422] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.141548] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.141669] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.141785] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.141970] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.142117] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.142237] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.142352] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.156606] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.168026] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.180395] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.191257] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.191497] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1780.191641] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1780.362061] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdef52c-ca25-4035-a1f3-95d9e6a42c09 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.369486] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7513b293-845d-4fa0-869c-195ace3b0ada {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.400028] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2ef909-fb98-45a3-8b2e-c06dd0e55c28 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.407520] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2c363a-5767-49a2-9f4f-70be391e11b9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.422641] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.431475] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.449618] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1780.450248] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.387s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.445346] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.965592] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1816.665887] env[61473]: WARNING oslo_vmware.rw_handles [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1816.665887] env[61473]: ERROR oslo_vmware.rw_handles [ 1816.666625] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1816.668523] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1816.668776] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-9306a145-e717-4577-9877-925e378ed1f0 
tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Copying Virtual Disk [datastore2] vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/d11f6396-ca27-4082-ac4e-1046e4856d29/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1816.669097] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2390f93e-6ab6-4a2f-8dde-81c7e4ad33bb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.677592] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1816.677592] env[61473]: value = "task-4281695" [ 1816.677592] env[61473]: _type = "Task" [ 1816.677592] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.685519] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.188725] env[61473]: DEBUG oslo_vmware.exceptions [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1817.189073] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.189643] env[61473]: ERROR nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1817.189643] env[61473]: Faults: ['InvalidArgument'] [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Traceback (most recent call last): [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] yield resources [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self.driver.spawn(context, instance, image_meta, [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self._fetch_image_if_missing(context, vi) [ 1817.189643] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] image_cache(vi, tmp_image_ds_loc) [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] vm_util.copy_virtual_disk( [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] session._wait_for_task(vmdk_copy_task) [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] return self.wait_for_task(task_ref) [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] return evt.wait() [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] result = hub.switch() [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1817.190080] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] return self.greenlet.switch() [ 1817.190649] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1817.190649] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self.f(*self.args, **self.kw) [ 1817.190649] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1817.190649] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] raise exceptions.translate_fault(task_info.error) [ 1817.190649] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1817.190649] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Faults: ['InvalidArgument'] [ 1817.190649] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] [ 1817.190649] env[61473]: INFO nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Terminating instance [ 1817.191634] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.191874] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1817.192138] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2af9b21-0dff-4769-9769-b6bb3137e29d 
{{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.194319] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1817.194515] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1817.195269] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d232a1-963f-422c-a7d5-018bf7769355 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.202281] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1817.202516] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f9322b8-f063-451f-9fa1-adc53589887b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.204769] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1817.204956] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1817.205974] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68c5cef2-8253-43fb-9958-bb62173d380e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.210569] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 1817.210569] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]523f31fa-ef50-3863-3376-9937f6a98fac" [ 1817.210569] env[61473]: _type = "Task" [ 1817.210569] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.219770] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]523f31fa-ef50-3863-3376-9937f6a98fac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.270898] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1817.271158] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1817.271320] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleting the datastore file [datastore2] a650e57a-85cf-416c-8787-a4ab98d4a930 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1817.271587] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-002b1350-4953-45e3-861f-69da27ea9cab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.277700] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for the task: (returnval){ [ 1817.277700] env[61473]: value = "task-4281697" [ 1817.277700] env[61473]: _type = "Task" [ 1817.277700] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.285092] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281697, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.720685] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1817.721081] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating directory with path [datastore2] vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1817.721188] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6798a4f-86ac-419e-9254-f846d558fc4a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.731489] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created directory with path [datastore2] vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1817.731667] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Fetch image to [datastore2] vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1817.731837] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1817.732540] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf686df3-2a4b-4a87-a364-54b7139e4803 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.738838] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759619bf-d27a-4616-b812-6459144af2e2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.747355] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a89d43c-9abc-4a84-a945-d3f5b0ca5910 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.777806] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7088a860-0da5-49b1-b940-b997314ffe62 
{{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.786789] env[61473]: DEBUG oslo_vmware.api [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Task: {'id': task-4281697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078194} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.788075] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1817.788265] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1817.788436] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1817.788609] env[61473]: INFO nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1817.790311] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ef7be373-f508-4bac-84da-155a719c0563 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.792071] env[61473]: DEBUG nova.compute.claims [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1817.792244] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.792458] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.812431] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1817.870996] env[61473]: DEBUG oslo_vmware.rw_handles [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1817.928434] env[61473]: DEBUG oslo_vmware.rw_handles [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1817.928635] env[61473]: DEBUG oslo_vmware.rw_handles [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1818.054764] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a167580-543e-4c7e-9ae1-f668d091e744 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.062757] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88f7819-1c17-4bf0-94ae-46497085c4b4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.093787] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc66919-c634-427f-9f7d-c8097738cc8a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.100864] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6088b19-b281-49f4-8e01-0922e0ce6bb0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.115650] env[61473]: DEBUG nova.compute.provider_tree [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.125367] env[61473]: DEBUG nova.scheduler.client.report [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1818.140429] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.348s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.140965] env[61473]: ERROR nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1818.140965] env[61473]: Faults: ['InvalidArgument'] [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Traceback (most recent call last): [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1818.140965] env[61473]: ERROR 
nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self.driver.spawn(context, instance, image_meta, [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self._fetch_image_if_missing(context, vi) [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] image_cache(vi, tmp_image_ds_loc) [ 1818.140965] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] vm_util.copy_virtual_disk( [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] session._wait_for_task(vmdk_copy_task) [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] return self.wait_for_task(task_ref) [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] return evt.wait() [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] result = hub.switch() [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] return self.greenlet.switch() [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1818.141482] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] self.f(*self.args, **self.kw) [ 1818.142057] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1818.142057] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] raise exceptions.translate_fault(task_info.error) [ 1818.142057] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1818.142057] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Faults: ['InvalidArgument'] [ 1818.142057] env[61473]: ERROR nova.compute.manager [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] [ 1818.142057] env[61473]: DEBUG nova.compute.utils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1818.143124] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Build of instance a650e57a-85cf-416c-8787-a4ab98d4a930 was re-scheduled: A specified parameter was not correct: fileType [ 1818.143124] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1818.143547] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1818.143722] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1818.143894] env[61473]: DEBUG nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1818.144078] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1818.582783] env[61473]: DEBUG nova.network.neutron [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.598980] env[61473]: INFO nova.compute.manager [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Took 0.45 seconds to deallocate network for instance. [ 1818.697045] env[61473]: INFO nova.scheduler.client.report [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Deleted allocations for instance a650e57a-85cf-416c-8787-a4ab98d4a930 [ 1818.718294] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9306a145-e717-4577-9877-925e378ed1f0 tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 686.831s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.719428] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 490.972s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.719644] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Acquiring lock "a650e57a-85cf-416c-8787-a4ab98d4a930-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.719849] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.720025] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.722961] env[61473]: INFO nova.compute.manager [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Terminating instance [ 1818.724990] env[61473]: DEBUG nova.compute.manager [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1818.725216] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1818.725477] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08c3843c-1754-4870-b052-4efe929b25e3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.729579] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1818.736757] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10aaa96d-5d7b-4f59-a94a-c203c08f7b34 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.765792] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a650e57a-85cf-416c-8787-a4ab98d4a930 could not be found. [ 1818.765989] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1818.766183] env[61473]: INFO nova.compute.manager [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1818.766429] env[61473]: DEBUG oslo.service.loopingcall [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.770777] env[61473]: DEBUG nova.compute.manager [-] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1818.770879] env[61473]: DEBUG nova.network.neutron [-] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1818.782971] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.783233] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.784818] env[61473]: INFO nova.compute.claims [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1818.795439] env[61473]: DEBUG nova.network.neutron [-] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.812856] env[61473]: INFO nova.compute.manager [-] [instance: a650e57a-85cf-416c-8787-a4ab98d4a930] Took 0.04 seconds to deallocate network for instance. 
[ 1818.911050] env[61473]: DEBUG oslo_concurrency.lockutils [None req-9e163eeb-80f6-4b1d-949c-cfc8c987a78d tempest-MultipleCreateTestJSON-725599120 tempest-MultipleCreateTestJSON-725599120-project-member] Lock "a650e57a-85cf-416c-8787-a4ab98d4a930" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.986836] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1bcd23-a2f3-4dc7-ae47-c199ea23dfbd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.994337] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e1d61c-01aa-477b-98d2-84a73965f104 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.023015] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84636675-1b32-42ad-ae6d-2d4e077b2ad2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.029595] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4be752-737e-4d59-b08a-9f7e76e6267e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.042331] env[61473]: DEBUG nova.compute.provider_tree [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.051605] env[61473]: DEBUG nova.scheduler.client.report [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1819.064103] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.064639] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}}
[ 1819.095231] env[61473]: DEBUG nova.compute.utils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1819.096518] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 1819.096714] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1819.104062] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}}
[ 1819.160629] env[61473]: DEBUG nova.policy [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cc9ca85b75c4756ba365f885d45a7b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4b7d168dab4487ea3810ee49d68ee49', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1819.174421] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}}
[ 1819.200608] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1819.200862] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1819.201029] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1819.201217] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1819.201361] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1819.201504] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1819.201708] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1819.201877] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1819.202076] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1819.202244] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1819.202416] env[61473]: DEBUG nova.virt.hardware [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1819.203321] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece25d8f-f2e5-402f-bbda-a6cbd745ad3d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.211263] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e289a83a-1a27-4db1-9d72-882d7efc6043 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1819.652250] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Successfully created port: b020ab30-1576-4d9e-905e-d33d9b7a067f {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1820.246099] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Successfully updated port: b020ab30-1576-4d9e-905e-d33d9b7a067f {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1820.256727] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "refresh_cache-a6532eba-0297-4320-9357-165e482c3790" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1820.256870] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquired lock "refresh_cache-a6532eba-0297-4320-9357-165e482c3790" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1820.257166] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1820.300563] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1820.550419] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Updating instance_info_cache with network_info: [{"id": "b020ab30-1576-4d9e-905e-d33d9b7a067f", "address": "fa:16:3e:18:82:35", "network": {"id": "94a714c4-67d6-438b-b0fa-72d5d5c52a50", "bridge": "br-int", "label": "tempest-ServersTestJSON-622034906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4b7d168dab4487ea3810ee49d68ee49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb020ab30-15", "ovs_interfaceid": "b020ab30-1576-4d9e-905e-d33d9b7a067f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1820.564482] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Releasing lock "refresh_cache-a6532eba-0297-4320-9357-165e482c3790" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1820.564789] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Instance network_info: |[{"id": "b020ab30-1576-4d9e-905e-d33d9b7a067f", "address": "fa:16:3e:18:82:35", "network": {"id": "94a714c4-67d6-438b-b0fa-72d5d5c52a50", "bridge": "br-int", "label": "tempest-ServersTestJSON-622034906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4b7d168dab4487ea3810ee49d68ee49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb020ab30-15", "ovs_interfaceid": "b020ab30-1576-4d9e-905e-d33d9b7a067f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}}
[ 1820.565214] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:82:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b020ab30-1576-4d9e-905e-d33d9b7a067f', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1820.572740] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Creating folder: Project (d4b7d168dab4487ea3810ee49d68ee49). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1820.573343] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2435742c-dfd6-463a-9d21-2154379e7c5f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1820.584807] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Created folder: Project (d4b7d168dab4487ea3810ee49d68ee49) in parent group-v843485.
[ 1820.584997] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Creating folder: Instances. Parent ref: group-v843583. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1820.585242] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f2149b8-af47-4458-87da-b92527889506 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1820.593661] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Created folder: Instances in parent group-v843583.
[ 1820.593890] env[61473]: DEBUG oslo.service.loopingcall [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1820.594081] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6532eba-0297-4320-9357-165e482c3790] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1820.594276] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c800cc6e-5a4d-4de0-87a8-bbefe02ff248 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1820.614118] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1820.614118] env[61473]: value = "task-4281700"
[ 1820.614118] env[61473]: _type = "Task"
[ 1820.614118] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1820.622054] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281700, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1820.722247] env[61473]: DEBUG nova.compute.manager [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] Received event network-vif-plugged-b020ab30-1576-4d9e-905e-d33d9b7a067f {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1820.722547] env[61473]: DEBUG oslo_concurrency.lockutils [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] Acquiring lock "a6532eba-0297-4320-9357-165e482c3790-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1820.722761] env[61473]: DEBUG oslo_concurrency.lockutils [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] Lock "a6532eba-0297-4320-9357-165e482c3790-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1820.723079] env[61473]: DEBUG oslo_concurrency.lockutils [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] Lock "a6532eba-0297-4320-9357-165e482c3790-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1820.723324] env[61473]: DEBUG nova.compute.manager [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] No waiting events found dispatching network-vif-plugged-b020ab30-1576-4d9e-905e-d33d9b7a067f {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1820.723509] env[61473]: WARNING nova.compute.manager [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] Received unexpected event network-vif-plugged-b020ab30-1576-4d9e-905e-d33d9b7a067f for instance with vm_state building and task_state spawning.
[ 1820.723679] env[61473]: DEBUG nova.compute.manager [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] Received event network-changed-b020ab30-1576-4d9e-905e-d33d9b7a067f {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 1820.723832] env[61473]: DEBUG nova.compute.manager [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] Refreshing instance network info cache due to event network-changed-b020ab30-1576-4d9e-905e-d33d9b7a067f. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}}
[ 1820.724038] env[61473]: DEBUG oslo_concurrency.lockutils [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] Acquiring lock "refresh_cache-a6532eba-0297-4320-9357-165e482c3790" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1820.724173] env[61473]: DEBUG oslo_concurrency.lockutils [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] Acquired lock "refresh_cache-a6532eba-0297-4320-9357-165e482c3790" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1820.724331] env[61473]: DEBUG nova.network.neutron [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] Refreshing network info cache for port b020ab30-1576-4d9e-905e-d33d9b7a067f {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1820.977894] env[61473]: DEBUG nova.network.neutron [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] Updated VIF entry in instance network info cache for port b020ab30-1576-4d9e-905e-d33d9b7a067f. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1820.978271] env[61473]: DEBUG nova.network.neutron [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] [instance: a6532eba-0297-4320-9357-165e482c3790] Updating instance_info_cache with network_info: [{"id": "b020ab30-1576-4d9e-905e-d33d9b7a067f", "address": "fa:16:3e:18:82:35", "network": {"id": "94a714c4-67d6-438b-b0fa-72d5d5c52a50", "bridge": "br-int", "label": "tempest-ServersTestJSON-622034906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4b7d168dab4487ea3810ee49d68ee49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb020ab30-15", "ovs_interfaceid": "b020ab30-1576-4d9e-905e-d33d9b7a067f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1820.989039] env[61473]: DEBUG oslo_concurrency.lockutils [req-20dd4539-2cf3-4463-bccb-4236aa4d55f0 req-0c599efe-a507-421e-8774-aca8ac312c92 service nova] Releasing lock "refresh_cache-a6532eba-0297-4320-9357-165e482c3790" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1821.124411] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281700, 'name': CreateVM_Task, 'duration_secs': 0.401073} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1821.124597] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6532eba-0297-4320-9357-165e482c3790] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1821.125219] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1821.125383] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1821.125709] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1821.125999] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdf52154-7e9e-4aaa-be14-575023ffdc71 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1821.129840] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for the task: (returnval){
[ 1821.129840] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5263e0a6-32c4-f0d4-97da-14b4768f5eb7"
[ 1821.129840] env[61473]: _type = "Task"
[ 1821.129840] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1821.136821] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5263e0a6-32c4-f0d4-97da-14b4768f5eb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1821.640623] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1821.640623] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1821.640623] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1826.104755] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "2f33fc61-3ea2-4818-918a-76cdae031a79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1826.105123] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "2f33fc61-3ea2-4818-918a-76cdae031a79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1832.968039] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1834.973831] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1838.968051] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1838.968051] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1838.968051] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1838.968051] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}}
[ 1839.966724] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1839.978694] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1839.979043] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1839.979088] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1839.979241] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1839.980641] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6497c76e-fa6e-40aa-a42d-27a9ec97eb1e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1839.989295] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942bf082-37ae-423c-ab35-4c8a45e97eca {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1840.003105] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccc8dc5-5d84-40cc-8627-9166bd97990a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1840.009018] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e97130-797e-4d5a-a8fa-8b3ccd53707e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1840.037668] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180633MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1840.037765] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1840.037954] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1840.188835] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189014] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b3a2455d-eeb2-4681-94a7-69951a17b79f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189157] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189284] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189402] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189521] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189632] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189744] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189857] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.189968] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1840.203078] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1840.213629] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1840.222922] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1840.232109] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1840.232329] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1840.232505] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1840.248678] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing inventories for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}}
[ 1840.262621] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating ProviderTree inventory for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}}
[ 1840.262795] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1840.272767] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing aggregate associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, aggregates: None {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}}
[ 1840.289078] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing trait associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}}
[ 1840.442524] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dd8e23-cbcd-4de8-9bf4-cc2033ab72c4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1840.450216] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717b3f73-0624-4b19-828b-d94fca01f376 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1840.479172] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00797121-5f6a-4f15-be09-d94ffb8bebf1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1840.485880] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d493e9bf-471b-4550-bc07-edd3341e72ec {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1840.498624] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1840.507426] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1840.532038] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1840.532243] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.494s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1841.532593] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1841.962289] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1841.965915] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1841.966086] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 1841.966212] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}}
[ 1841.988228] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.988395] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.988529] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.988903] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.988903] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.988903] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.989058] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.989144] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.989301] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.989365] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}}
[ 1841.989469] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}}
[ 1843.966709] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1843.967040] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances with incomplete migration {{(pid=61473) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11293}}
[ 1844.975613] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1849.966765] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1849.967036] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11255}}
[ 1849.977269] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] There are 0 instances to clean {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11264}}
[ 1851.973010] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1867.524061] env[61473]: WARNING oslo_vmware.rw_handles [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles response.begin()
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1867.524061] env[61473]: ERROR oslo_vmware.rw_handles
[ 1867.524061] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1867.526171] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1867.526401] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Copying Virtual Disk [datastore2] vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/81567f30-e1a4-43f5-8081-75d64dc03a86/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1867.526702] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30f7f194-0e4b-4646-a54f-a9bd23ee7975 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1867.534393] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){
[ 1867.534393] env[61473]: value = "task-4281701"
[ 1867.534393] env[61473]: _type = "Task"
[ 1867.534393] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1867.542953] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281701, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1868.045218] env[61473]: DEBUG oslo_vmware.exceptions [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1868.045497] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1868.046069] env[61473]: ERROR nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1868.046069] env[61473]: Faults: ['InvalidArgument']
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Traceback (most recent call last):
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] yield resources
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self.driver.spawn(context, instance, image_meta,
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self._fetch_image_if_missing(context, vi)
[ 1868.046069] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] image_cache(vi, tmp_image_ds_loc)
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] vm_util.copy_virtual_disk(
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] session._wait_for_task(vmdk_copy_task)
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] return self.wait_for_task(task_ref)
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] return evt.wait()
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] result = hub.switch()
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1868.046510] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] return self.greenlet.switch()
[ 1868.046924] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1868.046924] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self.f(*self.args, **self.kw)
[ 1868.046924] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1868.046924] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] raise exceptions.translate_fault(task_info.error)
[ 1868.046924] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1868.046924] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Faults: ['InvalidArgument']
[ 1868.046924] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b]
[ 1868.046924] env[61473]: INFO nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Terminating instance
[ 1868.048083] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1868.048296] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1868.048535] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae231808-f7bf-4c92-b442-2ace31eda2ff {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1868.051018] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 1868.051215] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1868.051956] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46733dc-e1b3-4ca6-92f7-2b6f07934db5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1868.058672] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1868.058889] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3be8b8c2-fdb7-4f58-98e2-b4d162e1367c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1868.061133] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1868.061304] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1868.062271] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c805714b-9298-4d64-9a02-5fa9a6a53653 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1868.067088] env[61473]: DEBUG oslo_vmware.api [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Waiting for the task: (returnval){
[ 1868.067088] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5275ba6c-862f-cb28-2a7c-8217c5f7c0a2"
[ 1868.067088] env[61473]: _type = "Task"
[ 1868.067088] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1868.074075] env[61473]: DEBUG oslo_vmware.api [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5275ba6c-862f-cb28-2a7c-8217c5f7c0a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1868.128741] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1868.129068] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1868.129371] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleting the datastore file [datastore2] 21e47c1d-d2be-427c-8b09-4e8da3df126b {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1868.129754] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e191a6da-21d1-40bb-b4be-8937b4c212d1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1868.138253] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){
[ 1868.138253] env[61473]: value = "task-4281703"
[ 1868.138253] env[61473]: _type = "Task"
[ 1868.138253] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1868.149594] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281703, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.578032] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1868.578377] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Creating directory with path [datastore2] vmware_temp/d6399201-fdc6-48c0-9265-fc591d36ed2c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1868.578544] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5321796e-93e7-40ab-8892-85201cbad5f9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.590342] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Created directory with path [datastore2] vmware_temp/d6399201-fdc6-48c0-9265-fc591d36ed2c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1868.590553] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Fetch image to [datastore2] vmware_temp/d6399201-fdc6-48c0-9265-fc591d36ed2c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1868.590712] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/d6399201-fdc6-48c0-9265-fc591d36ed2c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1868.591502] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d53d1c9-e52b-465b-a84c-23cc8febdb32 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.598274] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b244064-9871-4fe3-b97d-880bb9d1c05e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.608176] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704bac01-3df9-4b2c-badc-d165a1948441 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.638986] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf571b4-eb60-41b8-a713-5ca42082684e {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.650444] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-51f99a5c-b8c4-48ab-b16b-869e6684b30f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.652324] env[61473]: DEBUG oslo_vmware.api [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084802} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.652576] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1868.652754] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1868.652925] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1868.653155] env[61473]: INFO nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Took 0.60 seconds to destroy the instance on the hypervisor. 
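Note on the InvalidArgument failure traced above: per the traceback, the spawn dies inside oslo_vmware's task poll, which translates whatever fault vCenter attached to the CopyVirtualDisk_Task (here "A specified parameter was not correct: fileType") into an exception and raises it. A minimal sketch of that poll-and-raise pattern follows; poll_vcenter_task, get_task_info and VimFault are hypothetical stand-ins for illustration, not the real oslo_vmware API.

import time


class VimFault(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""


def poll_vcenter_task(get_task_info, interval=0.5):
    """Poll a vCenter task until it succeeds, or raise its fault."""
    while True:
        info = get_task_info()            # e.g. a PropertyCollector read of TaskInfo
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # The real _poll_task first maps the fault to an exception class
            # (exceptions.translate_fault) and then raises it; the
            # 'InvalidArgument: fileType' in this log surfaces exactly here.
            raise VimFault(info["error"])
        time.sleep(interval)              # the real loop runs as an eventlet loopingcall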
[ 1868.655979] env[61473]: DEBUG nova.compute.claims [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1868.656167] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.656376] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.677059] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1868.830664] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.831479] env[61473]: ERROR nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. 
[ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Traceback (most recent call last): [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] result = getattr(controller, method)(*args, **kwargs) [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._get(image_id) [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1868.831479] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] resp, body = self.http_client.get(url, headers=header) [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self.request(url, 'GET', **kwargs) [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._handle_response(resp) [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise exc.from_response(resp, resp.content) [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] During handling of the above exception, another exception occurred: [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1868.831877] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Traceback (most recent call last): [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] yield resources [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self.driver.spawn(context, instance, image_meta, [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._fetch_image_if_missing(context, vi) [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] image_fetch(context, vi, tmp_image_ds_loc) [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] images.fetch_image( [ 1868.832226] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] metadata = IMAGE_API.get(context, image_ref) [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return session.show(context, image_id, [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] _reraise_translated_image_exception(image_id) [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise new_exc.with_traceback(exc_trace) [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] result = getattr(controller, method)(*args, **kwargs) [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1868.832675] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._get(image_id) [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] resp, body = self.http_client.get(url, headers=header) [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self.request(url, 'GET', **kwargs) [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._handle_response(resp) [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise exc.from_response(resp, resp.content) [ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. 
[ 1868.833043] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1868.833720] env[61473]: INFO nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Terminating instance [ 1868.833720] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.833720] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1868.834189] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.834372] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquired lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.834529] env[61473]: DEBUG nova.network.neutron [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1868.835549] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3c762b5-ed34-46de-8606-82601e85eba2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.846729] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1868.846845] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1868.847865] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6a92f9a-7676-44a7-bf05-95b307579d04 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.855475] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1868.855475] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52ab0296-0923-7624-792f-26abc0a643c0" [ 1868.855475] env[61473]: _type = "Task" [ 1868.855475] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.864052] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52ab0296-0923-7624-792f-26abc0a643c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.864923] env[61473]: DEBUG nova.network.neutron [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1868.887068] env[61473]: DEBUG oslo_concurrency.lockutils [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.912203] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82e1b23-c9bf-477b-8902-efdbc1f49200 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.920030] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe60951c-f449-44df-9633-abd118f5764e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.950572] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fce42a-3a94-40fc-bc44-8ca3efb07a27 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.957628] env[61473]: DEBUG nova.network.neutron [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.959695] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf81bc8-4642-429f-a810-f9afafd1dc30 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.973140] env[61473]: DEBUG nova.compute.provider_tree [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.974813] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Releasing lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.975210] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1868.975395] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1868.976540] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2ed84e-1c0d-44b7-9e3b-c9b32647296a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.982241] env[61473]: DEBUG nova.scheduler.client.report [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1868.987224] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1868.987595] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc58adbc-2a06-4528-8e24-ce7126ae9570 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.995512] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.339s {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.996035] env[61473]: ERROR nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.996035] env[61473]: Faults: ['InvalidArgument'] [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Traceback (most recent call last): [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self.driver.spawn(context, instance, image_meta, [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self._fetch_image_if_missing(context, vi) [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] image_cache(vi, tmp_image_ds_loc) [ 1868.996035] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] vm_util.copy_virtual_disk( [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] session._wait_for_task(vmdk_copy_task) [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] return self.wait_for_task(task_ref) [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] return evt.wait() [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 
21e47c1d-d2be-427c-8b09-4e8da3df126b] result = hub.switch() [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] return self.greenlet.switch() [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1868.996455] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] self.f(*self.args, **self.kw) [ 1868.996961] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1868.996961] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] raise exceptions.translate_fault(task_info.error) [ 1868.996961] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.996961] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Faults: ['InvalidArgument'] [ 1868.996961] env[61473]: ERROR nova.compute.manager [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] [ 1868.996961] env[61473]: DEBUG nova.compute.utils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1869.000713] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Build of instance 21e47c1d-d2be-427c-8b09-4e8da3df126b was re-scheduled: A specified parameter was not correct: fileType [ 1869.000713] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1869.001119] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1869.001296] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1869.001468] env[61473]: DEBUG nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1869.001629] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1869.010660] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1869.010857] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1869.011047] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Deleting the datastore file [datastore2] b3a2455d-eeb2-4681-94a7-69951a17b79f {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1869.011290] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9a374b1-924c-4e4c-a881-c2867c8788e6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.018129] env[61473]: DEBUG oslo_vmware.api [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Waiting for the task: (returnval){ [ 1869.018129] env[61473]: value = "task-4281705" [ 1869.018129] env[61473]: _type = "Task" [ 1869.018129] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.025472] env[61473]: DEBUG oslo_vmware.api [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Task: {'id': task-4281705, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.369829] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1869.369829] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating directory with path [datastore2] vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.369829] env[61473]: DEBUG nova.network.neutron [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.370083] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-423673ec-888d-4750-b410-e4dac61e15e3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.382944] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created directory with path [datastore2] vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.383122] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Fetch image to [datastore2] vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1869.383328] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1869.384634] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf71f758-8198-4276-8fd2-40233d647170 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.388240] env[61473]: INFO nova.compute.manager [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Took 0.39 seconds to deallocate network for instance. 
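Note on the "Preparing fetch location" / "Fetch image to ... tmp-sparse.vmdk" entries above: the log shows the per-image serialization used around the datastore cache path (the Acquiring/Releasing lock lines on the cached .vmdk) and a download into a unique vmware_temp/<uuid>/ directory before the image lands in devstack-image-cache_base. An illustrative sketch of that fetch-if-missing shape, assuming a hypothetical download callable and using oslo_concurrency's lock context manager; this is not Nova's actual code.

import os
import uuid

from oslo_concurrency import lockutils


def fetch_image_if_missing(cache_dir, image_id, download):
    """`download` is a hypothetical callable that writes the image to a path."""
    cached = os.path.join(cache_dir, image_id, image_id + ".vmdk")
    with lockutils.lock(cached):            # matches the Acquired/Releasing lock lines
        if os.path.exists(cached):
            return cached                   # another request already cached it
        tmp = os.path.join(cache_dir, "vmware_temp", str(uuid.uuid4()), image_id)
        os.makedirs(tmp, exist_ok=True)     # 'Creating directory with path ...'
        download(os.path.join(tmp, "tmp-sparse.vmdk"))
        os.makedirs(os.path.dirname(cached), exist_ok=True)
        os.replace(os.path.join(tmp, "tmp-sparse.vmdk"), cached)
        return cached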
[ 1869.398197] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a932c025-9076-4762-87fc-8350642ab7d0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.409804] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba8e5ed-6e22-484a-abba-460d9c05bb9d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.445091] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec572d1-cab4-4ad4-b5ef-ac9441f8fc79 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.451921] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b568a8db-5921-40a5-8c56-a61bc4b61082 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.478385] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1869.509196] env[61473]: INFO nova.scheduler.client.report [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleted allocations for instance 21e47c1d-d2be-427c-8b09-4e8da3df126b [ 1869.531241] env[61473]: DEBUG oslo_vmware.api [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Task: {'id': task-4281705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093975} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.531497] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1869.531676] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1869.532320] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1869.532320] env[61473]: INFO nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Took 0.56 seconds to destroy the instance on the hypervisor. 
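Note on the SessionManager.AcquireGenericServiceTicket call and the "Downloading image file data ... to the data store datastore2" entry above: the ticket authorizes a direct HTTPS transfer of the image bytes to the ESX host's datastore /folder/ endpoint, which the later rw_handles entries show (a write handle with size = 21318656). A rough equivalent using requests rather than oslo_vmware.rw_handles.FileWriteHandle; ticket/session plumbing is simplified, and `chunks` is any byte iterator.

import requests


def upload_to_datastore(host, ds_path, chunks, cookies, verify=True):
    # The real FileWriteHandle sets Content-Length explicitly (21318656 in
    # this log); requests falls back to chunked transfer for a bare iterator.
    url = f"https://{host}:443/folder/{ds_path}"
    resp = requests.put(
        url,
        params={"dcPath": "ha-datacenter", "dsName": "datastore2"},
        data=chunks,          # streamed, never fully buffered in memory
        cookies=cookies,      # vCenter session cookie / generic service ticket
        verify=verify,
    )
    resp.raise_for_status()   # surface 4xx/5xx from the host agent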
[ 1869.532320] env[61473]: DEBUG oslo.service.loopingcall [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.533079] env[61473]: DEBUG nova.compute.manager [-] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network deallocation for instance since networking was not requested. {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1869.533992] env[61473]: DEBUG oslo_concurrency.lockutils [None req-58a4a280-9143-4319-a3c5-084569ff7acb tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 693.744s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.535781] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 497.872s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.536012] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "21e47c1d-d2be-427c-8b09-4e8da3df126b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.536218] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.537017] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.537887] env[61473]: DEBUG nova.compute.claims [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1869.538060] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.538262] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.541344] env[61473]: INFO nova.compute.manager [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Terminating instance [ 1869.543254] env[61473]: DEBUG oslo_vmware.rw_handles [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1869.545040] env[61473]: DEBUG nova.compute.manager [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1869.545230] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1869.545477] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57fc306f-af9a-45ad-9a63-a98979dc419f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.602194] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1869.611614] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b0c57c-fad5-40d7-874a-3f795b319b9f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.622343] env[61473]: DEBUG oslo_vmware.rw_handles [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Completed reading data from the image iterator. 
{{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1869.622528] env[61473]: DEBUG oslo_vmware.rw_handles [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1869.641439] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 21e47c1d-d2be-427c-8b09-4e8da3df126b could not be found. [ 1869.641644] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1869.641846] env[61473]: INFO nova.compute.manager [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Took 0.10 seconds to destroy the instance on the hypervisor. [ 1869.642109] env[61473]: DEBUG oslo.service.loopingcall [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.646451] env[61473]: DEBUG nova.compute.manager [-] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1869.646554] env[61473]: DEBUG nova.network.neutron [-] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1869.660948] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.673330] env[61473]: DEBUG nova.network.neutron [-] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.682739] env[61473]: INFO nova.compute.manager [-] [instance: 21e47c1d-d2be-427c-8b09-4e8da3df126b] Took 0.04 seconds to deallocate network for instance.
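The recurring "Acquiring lock ... / acquired ... :: waited Ns / released ... :: held Ns" triplets throughout this log come from oslo.concurrency's lock decorator, which times both how long a caller blocked before entering the critical section and how long the critical section itself ran (the 693.744s "held" on the build path and the 497.872s "waited" on the terminate path above are the two sides of the same per-instance lock). A minimal sketch of that bookkeeping, using a plain threading.Lock rather than oslo.concurrency's internals:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, caller):
        # "Acquiring lock <name> by <caller>" -- may block behind another holder
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')

The long waits are expected here: build, terminate, and event-clearing all serialize on the instance UUID lock, so a terminate request queues behind a slow build of the same instance.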
[ 1869.778943] env[61473]: DEBUG oslo_concurrency.lockutils [None req-c8ee9ac8-bfed-4550-9226-def59778454e tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "21e47c1d-d2be-427c-8b09-4e8da3df126b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.243s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.790466] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa32d79-ec94-4ba0-a727-3f3cd284a8e1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.798207] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf48745a-5225-4327-bff2-2471694d8c1e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.827583] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e136966-09ce-415f-a9be-35a65a18a6bd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.833873] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5b2ba4-46ae-44e7-935e-915bd982aeae {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.846437] env[61473]: DEBUG nova.compute.provider_tree [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1869.854385] env[61473]: DEBUG nova.scheduler.client.report [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1869.868787] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
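The "Inventory has not changed for provider ..." line above carries the full Placement inventory for this compute node. Placement computes usable capacity per resource class as (total - reserved) * allocation_ratio; a quick check with the logged values:

    # Values copied from the inventory dict logged above.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 329, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, usable)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 329.0

so the 4.0 CPU allocation ratio is what lets 48 physical cores back 192 schedulable vCPUs on this node.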
[ 1869.869507] env[61473]: ERROR nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Traceback (most recent call last): [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] result = getattr(controller, method)(*args, **kwargs) [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._get(image_id) [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1869.869507] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] resp, body = self.http_client.get(url, headers=header) [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self.request(url, 'GET', **kwargs) [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._handle_response(resp) [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise exc.from_response(resp, resp.content) [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required.
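The HTTP 401 above does not surface to the caller as-is: Nova's Glance wrapper catches the client exception and re-raises it as nova.exception.ImageNotAuthorized with the original traceback attached, which is exactly why the log continues below with "During handling of the above exception, another exception occurred" and a second traceback ending in ImageNotAuthorized. A simplified sketch of that translate-and-reraise pattern (the real mapping lives in nova/image/glance.py; the class below is a stand-in):

    import sys

    class ImageNotAuthorized(Exception):   # stand-in for nova.exception.ImageNotAuthorized
        pass

    def _translated(image_id, exc):
        # Simplified mapping: a 401/Unauthorized from the image client becomes
        # Nova's own exception type; anything else passes through unchanged.
        if type(exc).__name__ in ("HTTPUnauthorized", "Unauthorized"):
            return ImageNotAuthorized("Not authorized for image %s." % image_id)
        return exc

    def show(client, context, image_id):
        try:
            return client.call(context, 2, "get", args=(image_id,))
        except Exception:
            _, value, trace = sys.exc_info()
            # Re-raising the translated error with the original traceback
            # attached is what produces the chained, two-traceback output
            # seen in the log below.
            raise _translated(image_id, value).with_traceback(trace)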
[ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] During handling of the above exception, another exception occurred: [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1869.869854] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Traceback (most recent call last): [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self.driver.spawn(context, instance, image_meta, [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._fetch_image_if_missing(context, vi) [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] image_fetch(context, vi, tmp_image_ds_loc) [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] images.fetch_image( [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] metadata = IMAGE_API.get(context, image_ref) [ 1869.870183] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return session.show(context, image_id, [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] _reraise_translated_image_exception(image_id) [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise new_exc.with_traceback(exc_trace) [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: 
b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] result = getattr(controller, method)(*args, **kwargs) [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._get(image_id) [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1869.870561] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] resp, body = self.http_client.get(url, headers=header) [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self.request(url, 'GET', **kwargs) [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self._handle_response(resp) [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise exc.from_response(resp, resp.content) [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] nova.exception.ImageNotAuthorized: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. [ 1869.870947] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1869.870947] env[61473]: DEBUG nova.compute.utils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. 
{{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1869.871305] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.210s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.872607] env[61473]: INFO nova.compute.claims [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1869.875051] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Build of instance b3a2455d-eeb2-4681-94a7-69951a17b79f was re-scheduled: Not authorized for image aa35b7fc-44b5-479c-b6c8-60930c581f0d. {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1869.875521] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1869.875750] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.875900] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquired lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.876075] env[61473]: DEBUG nova.network.neutron [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1869.905129] env[61473]: DEBUG nova.network.neutron [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1869.983084] env[61473]: DEBUG nova.network.neutron [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.994097] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Releasing lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.994306] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1869.994492] env[61473]: DEBUG nova.compute.manager [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Skipping network deallocation for instance since networking was not requested. {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1870.083990] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09173738-e36b-4403-85e0-eb815ec5c83a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.091828] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6b7c5d-2390-4174-9c0f-2a9c9fa00980 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.095398] env[61473]: INFO nova.scheduler.client.report [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Deleted allocations for instance b3a2455d-eeb2-4681-94a7-69951a17b79f [ 1870.129840] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac7146ea-c753-4bfa-aeb6-9b86d890f09b tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "b3a2455d-eeb2-4681-94a7-69951a17b79f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 630.065s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.130588] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23ab70f-b62a-4be8-bcbf-1eb7f29cb876 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.134066] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "b3a2455d-eeb2-4681-94a7-69951a17b79f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited
434.921s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.134302] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "b3a2455d-eeb2-4681-94a7-69951a17b79f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.134503] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "b3a2455d-eeb2-4681-94a7-69951a17b79f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.134670] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "b3a2455d-eeb2-4681-94a7-69951a17b79f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.137095] env[61473]: INFO nova.compute.manager [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Terminating instance [ 1870.138736] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquiring lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.138859] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Acquired lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.139049] env[61473]: DEBUG nova.network.neutron [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1870.143607] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda4feed-aba7-4bc8-b930-9591413dd9e1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.150334] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Starting instance...
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1870.163873] env[61473]: DEBUG nova.compute.provider_tree [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.170424] env[61473]: DEBUG nova.network.neutron [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1870.173324] env[61473]: DEBUG nova.scheduler.client.report [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1870.189713] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.190228] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1870.212207] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.212513] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.214184] env[61473]: INFO nova.compute.claims [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1870.230268] env[61473]: DEBUG nova.compute.utils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1870.231646] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Not allocating networking since 'none' was specified. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1870.240448] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1870.244042] env[61473]: DEBUG nova.network.neutron [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.251527] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Releasing lock "refresh_cache-b3a2455d-eeb2-4681-94a7-69951a17b79f" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.251914] env[61473]: DEBUG nova.compute.manager [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1870.252128] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1870.252784] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59b954a1-ed28-4f57-bb73-d84f07cc7a93 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.262367] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062cfd72-4a0e-48a3-9f19-10aa0910dae6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.294145] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b3a2455d-eeb2-4681-94a7-69951a17b79f could not be found. [ 1870.294363] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1870.294539] env[61473]: INFO nova.compute.manager [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1870.294782] env[61473]: DEBUG oslo.service.loopingcall [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1870.295009] env[61473]: DEBUG nova.compute.manager [-] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1870.295116] env[61473]: DEBUG nova.network.neutron [-] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1870.311344] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Start spawning the instance on the hypervisor.
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1870.378187] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=<?>,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-14T02:07:44Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1870.378469] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1870.378625] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1870.378803] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1870.378995] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1870.379376] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1870.379376] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1870.379512] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1870.379637] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1870.379797] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1870.379966] env[61473]: DEBUG nova.virt.hardware [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
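The nova.virt.hardware lines above enumerate candidate (sockets, cores, threads) layouts for the flavor's single vCPU and filter them against the flavor/image limits (65536 each here, i.e. effectively unlimited). For vcpus=1 only one layout exists, hence "Got 1 possible topologies". A toy version of that enumeration (not Nova's actual implementation, which also handles preferences and ordering):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Yield every (sockets, cores, threads) combination that exactly
        # covers the requested vCPU count and respects the limits.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -> one possible topology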
[ 1870.382231] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7939316f-066e-46cb-aa25-9c66bfc5841d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.389232] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6582aea-03c7-49a1-81f3-2c1a705fc0fe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.394650] env[61473]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61473) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1870.394889] env[61473]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
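Unlike the Glance 401 earlier, which surfaced as a user-level ImageNotAuthorized, this 401 happens on a connection Nova makes with its own service credentials, so the client wrapper in nova/network/neutron.py escalates it to NeutronAdminCredentialConfigurationInvalid: an admin token that cannot authenticate points at broken [neutron] credentials in nova.conf rather than at a bad user request. A sketch of that wrapper pattern (names simplified; the real code proxies every neutronclient method through a decorator along these lines):

    import functools

    class Unauthorized(Exception):   # stand-in for neutronclient.common.exceptions.Unauthorized
        pass

    class NeutronAdminCredentialConfigurationInvalid(Exception):   # stand-in for nova.exception
        pass

    def escalate_admin_401(func, admin=True):
        # A 401 seen while using the service's own admin credentials is
        # treated as a deployment/configuration error, not a normal API error.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                if admin:
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise
        return wrapper

The full traceback that follows shows this firing from deep inside the retried deallocate loop: list_ports -> do_request -> _handle_fault_response -> the wrapper at neutron.py:212.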
[ 1870.395401] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-2e8ab0bf-2a93-42aa-82dc-eb636face19b'] [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1870.395401] env[61473]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.395936] env[61473]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1870.395936] env[61473]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.396497] env[61473]: ERROR oslo.service.loopingcall [ 1870.396990] env[61473]: ERROR nova.compute.manager [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.407776] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance VIF info [] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1870.413479] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Creating folder: Project (0ede1dac1ccc4752abb9bcc2d6cd0f22). 
Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1870.415915] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f03a7fc-500d-43d4-8e55-c1349bb62558 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.425118] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Created folder: Project (0ede1dac1ccc4752abb9bcc2d6cd0f22) in parent group-v843485. [ 1870.425118] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Creating folder: Instances. Parent ref: group-v843586. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1870.425581] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9c68737-1069-4843-9f17-cf10a8f20a73 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.428316] env[61473]: ERROR nova.compute.manager [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Traceback (most recent call last): [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] ret = obj(*args, **kwargs) [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] exception_handler_v20(status_code, error_body) [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise client_exc(message=error_message, [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Neutron server returns request_ids: ['req-2e8ab0bf-2a93-42aa-82dc-eb636face19b'] [ 1870.428316] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] During handling of the above exception, another exception occurred: [ 1870.428785] 
env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Traceback (most recent call last): [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._delete_instance(context, instance, bdms) [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._shutdown_instance(context, instance, bdms) [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._try_deallocate_network(context, instance, requested_networks) [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] with excutils.save_and_reraise_exception(): [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.428785] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self.force_reraise() [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise self.value [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] _deallocate_network_with_retries() [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return evt.wait() [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] result = hub.switch() [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return 
self.greenlet.switch() [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1870.429245] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] result = func(*self.args, **self.kw) [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] result = f(*args, **kwargs) [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._deallocate_network( [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self.network_api.deallocate_for_instance( [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] data = neutron.list_ports(**search_opts) [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] ret = obj(*args, **kwargs) [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self.list('ports', self.ports_path, retrieve_all, [ 1870.429655] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] ret = obj(*args, **kwargs) [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] for r in self._pagination(collection, path, **params): [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] res = self.get(path, params=params) [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", 
line 196, in wrapper [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] ret = obj(*args, **kwargs) [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self.retry_request("GET", action, body=body, [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] ret = obj(*args, **kwargs) [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1870.430107] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] return self.do_request(method, action, body=body, [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] ret = obj(*args, **kwargs) [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] self._handle_fault_response(status_code, replybody, resp) [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.430570] env[61473]: ERROR nova.compute.manager [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] [ 1870.439735] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Created folder: Instances in parent group-v843586. [ 1870.439923] env[61473]: DEBUG oslo.service.loopingcall [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1870.442070] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1870.442292] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e218922f-9ed1-4296-9c3f-07f46c535fe9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.458878] env[61473]: DEBUG oslo_concurrency.lockutils [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Lock "b3a2455d-eeb2-4681-94a7-69951a17b79f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.325s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.462393] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1870.462393] env[61473]: value = "task-4281708" [ 1870.462393] env[61473]: _type = "Task" [ 1870.462393] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.470158] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281708, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.473708] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672f51c1-2137-4b2f-ab2b-76a2fe09a8ea {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.480421] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb48628f-1cff-4cfd-8fe1-425be731f888 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.516029] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127914b8-ef8b-4f11-b751-880ccf010dcf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.518051] env[61473]: INFO nova.compute.manager [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] [instance: b3a2455d-eeb2-4681-94a7-69951a17b79f] Successfully reverted task state from None on failure for instance. [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server [None req-1b539160-3366-4bff-949e-91016fdf0734 tempest-ServerShowV257Test-1569778851 tempest-ServerShowV257Test-1569778851-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-2e8ab0bf-2a93-42aa-82dc-eb636face19b'] [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1870.522043] env[61473]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.522641] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3349, in terminate_instance [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in do_terminate_instance [ 1870.523127] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 1870.523601] env[61473]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1870.524104] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.524104] env[61473]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.524566] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.525075] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1870.525075] env[61473]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1870.525075] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1870.525075] env[61473]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1870.525075] env[61473]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1870.525075] env[61473]: ERROR oslo_messaging.rpc.server [ 1870.525729] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66811289-8f4d-401d-a21a-bc58f010ca7c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.541022] env[61473]: DEBUG nova.compute.provider_tree [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.550022] env[61473]: DEBUG nova.scheduler.client.report [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1870.568215] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.356s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.568715] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1870.604957] env[61473]: DEBUG nova.compute.utils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1870.606929] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Allocating IP information in the background. 
{{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1870.606929] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1870.616510] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1870.685844] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1870.694808] env[61473]: DEBUG nova.policy [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd79499f5069245d89c6cd1c51e1b754d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3672e7acb99b4704be2776c46cb348ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1870.708337] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1870.708575] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1870.708727] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] 
Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1870.708900] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1870.711598] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1870.711598] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1870.711598] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1870.711598] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1870.711598] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1870.711931] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1870.711931] env[61473]: DEBUG nova.virt.hardware [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1870.711931] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05b39fe-7eea-4bae-8362-1371b31a96e3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.719268] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a6d1c8-5307-49ac-bac6-c85f4c0ace66 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.971929] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281708, 'name': CreateVM_Task, 'duration_secs': 0.302144} 
completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.972109] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1870.972544] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.972703] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.973038] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1870.973286] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-986fac47-4d69-4e02-9af0-0d941ad47acc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.977860] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Waiting for the task: (returnval){ [ 1870.977860] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5297dfa3-4e54-0762-4f9a-11b68bbb7c00" [ 1870.977860] env[61473]: _type = "Task" [ 1870.977860] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.985285] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5297dfa3-4e54-0762-4f9a-11b68bbb7c00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.061528] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Successfully created port: 36ec5b4d-78a2-4140-9138-69ee1705955b {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1871.488292] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.488613] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1871.488863] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.633290] env[61473]: DEBUG nova.compute.manager [req-4c35e409-3bff-449d-a180-cf6e558425eb req-6c44da02-ae4e-4e30-bcbb-981753524b4e service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Received event network-vif-plugged-36ec5b4d-78a2-4140-9138-69ee1705955b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1871.633538] env[61473]: DEBUG oslo_concurrency.lockutils [req-4c35e409-3bff-449d-a180-cf6e558425eb req-6c44da02-ae4e-4e30-bcbb-981753524b4e service nova] Acquiring lock "46b86ba3-99de-4493-b066-0a99bc2d2f27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.633726] env[61473]: DEBUG oslo_concurrency.lockutils [req-4c35e409-3bff-449d-a180-cf6e558425eb req-6c44da02-ae4e-4e30-bcbb-981753524b4e service nova] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.633928] env[61473]: DEBUG oslo_concurrency.lockutils [req-4c35e409-3bff-449d-a180-cf6e558425eb req-6c44da02-ae4e-4e30-bcbb-981753524b4e service nova] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.634321] env[61473]: DEBUG nova.compute.manager [req-4c35e409-3bff-449d-a180-cf6e558425eb req-6c44da02-ae4e-4e30-bcbb-981753524b4e service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] No waiting events found dispatching 
network-vif-plugged-36ec5b4d-78a2-4140-9138-69ee1705955b {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1871.634590] env[61473]: WARNING nova.compute.manager [req-4c35e409-3bff-449d-a180-cf6e558425eb req-6c44da02-ae4e-4e30-bcbb-981753524b4e service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Received unexpected event network-vif-plugged-36ec5b4d-78a2-4140-9138-69ee1705955b for instance with vm_state building and task_state spawning. [ 1871.738801] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Successfully updated port: 36ec5b4d-78a2-4140-9138-69ee1705955b {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1871.759324] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "refresh_cache-46b86ba3-99de-4493-b066-0a99bc2d2f27" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.759487] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired lock "refresh_cache-46b86ba3-99de-4493-b066-0a99bc2d2f27" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.759738] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1871.812097] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1872.210621] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Updating instance_info_cache with network_info: [{"id": "36ec5b4d-78a2-4140-9138-69ee1705955b", "address": "fa:16:3e:ad:8b:3f", "network": {"id": "00f5e011-93c2-4626-9f52-92ba3a3c2c1e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-717398445-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3672e7acb99b4704be2776c46cb348ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36ec5b4d-78", "ovs_interfaceid": "36ec5b4d-78a2-4140-9138-69ee1705955b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.224957] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Releasing lock "refresh_cache-46b86ba3-99de-4493-b066-0a99bc2d2f27" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.225277] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Instance network_info: |[{"id": "36ec5b4d-78a2-4140-9138-69ee1705955b", "address": "fa:16:3e:ad:8b:3f", "network": {"id": "00f5e011-93c2-4626-9f52-92ba3a3c2c1e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-717398445-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3672e7acb99b4704be2776c46cb348ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36ec5b4d-78", "ovs_interfaceid": "36ec5b4d-78a2-4140-9138-69ee1705955b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 1872.225680] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:8b:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7514a465-f1a4-4a8b-b76b-726b1a9d7e2f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36ec5b4d-78a2-4140-9138-69ee1705955b', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1872.233379] env[61473]: DEBUG oslo.service.loopingcall [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1872.233845] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1872.234116] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-587cbe1a-57ee-4b40-90fc-907274eaac1a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.254208] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1872.254208] env[61473]: value = "task-4281709" [ 1872.254208] env[61473]: _type = "Task" [ 1872.254208] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.261623] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281709, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.764157] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281709, 'name': CreateVM_Task, 'duration_secs': 0.391602} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.764431] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1872.765012] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.765187] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.765491] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1872.765729] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c60108f-8b98-40fc-b69e-136b46c6871e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.769897] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 1872.769897] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5242642d-2a9a-59ae-9cc4-e47f00489759" [ 1872.769897] env[61473]: _type = "Task" [ 1872.769897] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.778471] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5242642d-2a9a-59ae-9cc4-e47f00489759, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.280811] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.281094] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1873.281310] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.660181] env[61473]: DEBUG nova.compute.manager [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Received event network-changed-36ec5b4d-78a2-4140-9138-69ee1705955b {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1873.660379] env[61473]: DEBUG nova.compute.manager [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Refreshing instance network info cache due to event network-changed-36ec5b4d-78a2-4140-9138-69ee1705955b. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1873.660583] env[61473]: DEBUG oslo_concurrency.lockutils [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] Acquiring lock "refresh_cache-46b86ba3-99de-4493-b066-0a99bc2d2f27" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.660724] env[61473]: DEBUG oslo_concurrency.lockutils [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] Acquired lock "refresh_cache-46b86ba3-99de-4493-b066-0a99bc2d2f27" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.660880] env[61473]: DEBUG nova.network.neutron [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Refreshing network info cache for port 36ec5b4d-78a2-4140-9138-69ee1705955b {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1874.003731] env[61473]: DEBUG nova.network.neutron [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Updated VIF entry in instance network info cache for port 36ec5b4d-78a2-4140-9138-69ee1705955b. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1874.004167] env[61473]: DEBUG nova.network.neutron [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Updating instance_info_cache with network_info: [{"id": "36ec5b4d-78a2-4140-9138-69ee1705955b", "address": "fa:16:3e:ad:8b:3f", "network": {"id": "00f5e011-93c2-4626-9f52-92ba3a3c2c1e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-717398445-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3672e7acb99b4704be2776c46cb348ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36ec5b4d-78", "ovs_interfaceid": "36ec5b4d-78a2-4140-9138-69ee1705955b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.014411] env[61473]: DEBUG oslo_concurrency.lockutils [req-805f5cd0-52f6-4009-b9bd-d41a7b32e854 req-be95533a-87e8-4e6a-9be9-cec98339acfb service nova] Releasing lock "refresh_cache-46b86ba3-99de-4493-b066-0a99bc2d2f27" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.144889] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_power_states {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1892.167073] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Getting list of instances from cluster (obj){ [ 1892.167073] env[61473]: value = "domain-c8" [ 1892.167073] env[61473]: _type = "ClusterComputeResource" [ 1892.167073] env[61473]: } {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1892.168483] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d744e8-f360-47d0-81a2-96f11cb26d64 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.185277] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Got total of 10 instances {{(pid=61473) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1892.185520] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid e00abe75-5243-4ab2-801b-f1d5f023b46b {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.185699] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 {{(pid=61473) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.185861] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.186028] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.186186] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid cbbd16ce-8cea-4d08-b672-99da04f148e4 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.186336] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 19154895-863b-4468-8737-32105f98528b {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.186489] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 6b12b76a-d5a3-4a60-98e6-b0329389ca75 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.186638] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid a6532eba-0297-4320-9357-165e482c3790 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.186783] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid db8f9797-0e07-422c-b0d5-562189fc3f3d {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.187069] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Triggering sync for uuid 46b86ba3-99de-4493-b066-0a99bc2d2f27 {{(pid=61473) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10365}} [ 1892.187397] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.187636] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.187833] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.188040] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.188339] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.188433] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "19154895-863b-4468-8737-32105f98528b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.188620] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.188810] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "a6532eba-0297-4320-9357-165e482c3790" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.188998] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.189205] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.012582] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.967404] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.967735] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.967994] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1900.966275] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1900.966477] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1900.978430] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.978695] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.978811] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.979451] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1900.980085] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b84eee-52e4-4e41-acee-c04b74afec32 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.988542] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013dc7a9-e679-49b1-a33f-173a41fc9287 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.003962] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7702f2d-5544-4ca0-a891-e5476d47e2ed {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.009949] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9143f83-90d9-4010-86ff-345debd46326 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.039854] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180630MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1901.040041] env[61473]: DEBUG 
oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.040250] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.117537] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance e00abe75-5243-4ab2-801b-f1d5f023b46b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117537] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117537] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117537] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117788] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117788] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117788] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117788] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117910] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.117910] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1901.131473] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1901.141179] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1901.141413] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1901.141560] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1901.276243] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767ee230-035b-4467-8163-9dd702d1324e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.283979] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f077ae-6a65-42c3-b5fc-105ee6abcbfa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.314769] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c29401b-320d-49b6-ada0-9e969b7f2f2f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.321611] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04521f56-c086-4459-b888-cc145afe8f38 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.334439] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1901.342857] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1901.358413] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1901.358620] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.318s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.354590] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.354923] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.967069] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.967373] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1903.967373] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1903.987874] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988041] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988158] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988281] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988400] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988518] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988634] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988750] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988863] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.988973] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1903.989100] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1904.966448] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.649372] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "a6532eba-0297-4320-9357-165e482c3790" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.860137] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.918064] env[61473]: WARNING oslo_vmware.rw_handles [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1915.918064] env[61473]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1915.918064] env[61473]: ERROR oslo_vmware.rw_handles [ 1915.918640] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1915.921325] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1915.921598] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Copying Virtual Disk [datastore2] vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/4f8e43ae-7755-4fcc-975c-62f8f405aeff/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1915.921885] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1781849-6e6a-414e-83d6-3e25d1ffa256 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.929508] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1915.929508] env[61473]: value = "task-4281710" [ 1915.929508] env[61473]: _type = "Task" [ 1915.929508] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.937386] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.440293] env[61473]: DEBUG oslo_vmware.exceptions [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1916.440553] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.441125] env[61473]: ERROR nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1916.441125] env[61473]: Faults: ['InvalidArgument'] [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Traceback (most recent call last): [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] yield resources [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] self.driver.spawn(context, instance, image_meta, [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] self._fetch_image_if_missing(context, vi) [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1916.441125] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] image_cache(vi, tmp_image_ds_loc) [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] vm_util.copy_virtual_disk( [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] session._wait_for_task(vmdk_copy_task) [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] return self.wait_for_task(task_ref) [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] return evt.wait() [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] result = hub.switch() [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] return self.greenlet.switch() [ 1916.441559] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1916.441917] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] self.f(*self.args, **self.kw) [ 1916.441917] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1916.441917] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] raise exceptions.translate_fault(task_info.error) [ 1916.441917] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1916.441917] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Faults: ['InvalidArgument'] [ 1916.441917] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] [ 1916.441917] env[61473]: INFO nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Terminating instance [ 1916.442996] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.444245] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.444914] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 
e00abe75-5243-4ab2-801b-f1d5f023b46b] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1916.445117] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1916.445360] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b7a95da-8bde-4bc4-8647-deebc86f4f4d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.448886] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c636c2-ed7d-4dc3-b028-fe9e0d450d49 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.455627] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1916.455873] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20c76301-52f5-4c73-896a-ae1b492f3c73 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.457904] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.458095] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1916.459039] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40e8d5ed-1048-43d8-a49e-28d95ce73a42 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.463753] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 1916.463753] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52faf7a1-311e-3c34-bd2c-f7fe18749cd3" [ 1916.463753] env[61473]: _type = "Task" [ 1916.463753] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.470436] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52faf7a1-311e-3c34-bd2c-f7fe18749cd3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.521733] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1916.521958] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1916.522150] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleting the datastore file [datastore2] e00abe75-5243-4ab2-801b-f1d5f023b46b {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1916.522410] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b17f9b6-7ee8-4b8a-9342-766c662ed717 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.528542] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 1916.528542] env[61473]: value = "task-4281712" [ 1916.528542] env[61473]: _type = "Task" [ 1916.528542] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.536180] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.974034] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1916.974317] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating directory with path [datastore2] vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.974496] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07fbf8f2-8474-41ad-9433-49c465c741e8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.990359] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Created directory with path [datastore2] vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.990551] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Fetch image to [datastore2] vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1916.990723] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1916.991447] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00dff8e-6b45-4c9b-84e6-49de60a6ce7d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.997664] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c653df9-ac27-4a15-bec6-e9b3953052bf {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.006349] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accd21ec-0c87-4bf3-b003-00807ca44f12 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.038630] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d19bd9ba-add0-40a4-b78e-c8f031315191 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.047404] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-798f4573-f2e5-4e09-9c00-4cfafe3aa5c4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.049050] env[61473]: DEBUG oslo_vmware.api [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064424} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.049379] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1917.049582] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1917.049758] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1917.049926] env[61473]: INFO nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Took 0.60 seconds to destroy the instance on the hypervisor. 
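The task records above (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, the "Waiting for the task" blocks and the "progress is 0%." polls) all follow the same oslo.vmware pattern: invoke a vim method that immediately returns a task moref, then block on the session's wait_for_task, which polls the task and raises a translated fault on error. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and datastore path below are placeholders, not values from this log:

# Sketch of the invoke-then-poll pattern behind the task records above.
# Host, credentials and the datastore file path are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10,
                               task_poll_interval=0.5)

file_manager = session.vim.service_content.fileManager
# The *_Task call returns a task moref right away ...
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          file_manager,
                          name='[datastore2] some-instance-dir')  # placeholder
# ... and wait_for_task polls it (the "progress is 0%." lines above),
# raising a translated exception such as VimFaultException on failure.
session.wait_for_task(task)

The VimFaultException traceback earlier in this section is exactly that raise path: _poll_task sees the task error and raises exceptions.translate_fault(task_info.error).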
[ 1917.052043] env[61473]: DEBUG nova.compute.claims [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1917.052224] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.052492] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.076037] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1917.127252] env[61473]: DEBUG oslo_vmware.rw_handles [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1917.188552] env[61473]: DEBUG oslo_vmware.rw_handles [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1917.188782] env[61473]: DEBUG oslo_vmware.rw_handles [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1917.296078] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b569e439-ee23-425d-949a-136cf47de4d0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.304857] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702a3c21-fa3a-48d5-a23b-dd8967040c6e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.333739] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3c649c-6f3f-4482-bb84-48504de80b4c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.340772] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1416da18-3359-410b-a49d-a05b54afa1f4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.353518] env[61473]: DEBUG nova.compute.provider_tree [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.361502] env[61473]: DEBUG nova.scheduler.client.report [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1917.376445] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.376969] env[61473]: ERROR nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1917.376969] env[61473]: Faults: ['InvalidArgument'] [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Traceback (most recent call last): [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: 
e00abe75-5243-4ab2-801b-f1d5f023b46b] self.driver.spawn(context, instance, image_meta, [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] self._fetch_image_if_missing(context, vi) [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] image_cache(vi, tmp_image_ds_loc) [ 1917.376969] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] vm_util.copy_virtual_disk( [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] session._wait_for_task(vmdk_copy_task) [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] return self.wait_for_task(task_ref) [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] return evt.wait() [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] result = hub.switch() [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] return self.greenlet.switch() [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1917.377361] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] self.f(*self.args, **self.kw) [ 1917.377697] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1917.377697] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] raise exceptions.translate_fault(task_info.error) [ 1917.377697] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1917.377697] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Faults: ['InvalidArgument'] [ 1917.377697] env[61473]: ERROR nova.compute.manager [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] [ 1917.377697] env[61473]: DEBUG nova.compute.utils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1917.379242] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Build of instance e00abe75-5243-4ab2-801b-f1d5f023b46b was re-scheduled: A specified parameter was not correct: fileType [ 1917.379242] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1917.379610] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1917.379856] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1917.379943] env[61473]: DEBUG nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1917.380118] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1917.650741] env[61473]: DEBUG nova.network.neutron [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.662621] env[61473]: INFO nova.compute.manager [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Took 0.28 seconds to deallocate network for instance. [ 1917.759910] env[61473]: INFO nova.scheduler.client.report [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleted allocations for instance e00abe75-5243-4ab2-801b-f1d5f023b46b [ 1917.786239] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e323e25c-e247-427f-8137-a5874730e171 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.960s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.787487] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 441.812s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.787760] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "e00abe75-5243-4ab2-801b-f1d5f023b46b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.788014] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.788217] env[61473]: 
DEBUG oslo_concurrency.lockutils [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.791680] env[61473]: INFO nova.compute.manager [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Terminating instance [ 1917.793529] env[61473]: DEBUG nova.compute.manager [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1917.793765] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1917.794072] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5dba3822-388e-4ed1-9c03-968da195db23 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.804191] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2e5469-f898-40a0-9e77-9a4137d09b77 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.815839] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1917.835874] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e00abe75-5243-4ab2-801b-f1d5f023b46b could not be found. [ 1917.836162] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1917.836354] env[61473]: INFO nova.compute.manager [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Took 0.04 seconds to destroy the instance on the hypervisor.
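The destroy sequence above is deliberately tolerant of a missing backend VM: FindAllByUuid comes back empty, vmops logs InstanceNotFound at WARNING, and terminate still reports "Instance destroyed" 0.04 seconds later, so a delete can complete even when spawn never created anything on the hypervisor. A minimal sketch of that idempotent-destroy pattern (the helper names and signatures are illustrative, not Nova's actual code):

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, find_vm_ref, destroy_vm):
        # Destroy must succeed even when no VM exists on the backend, so a
        # missing object is logged and treated as the goal state already held.
        try:
            vm_ref = find_vm_ref(instance_uuid)  # e.g. SearchIndex.FindAllByUuid
            destroy_vm(vm_ref)
        except InstanceNotFound as exc:
            LOG.warning("Instance does not exist on backend: %s", exc)
        LOG.debug("Instance destroyed")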
[ 1917.836598] env[61473]: DEBUG oslo.service.loopingcall [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1917.836830] env[61473]: DEBUG nova.compute.manager [-] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1917.836933] env[61473]: DEBUG nova.network.neutron [-] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1917.861075] env[61473]: DEBUG nova.network.neutron [-] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.863879] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.863879] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.865210] env[61473]: INFO nova.compute.claims [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1917.868605] env[61473]: INFO nova.compute.manager [-] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] Took 0.03 seconds to deallocate network for instance. [ 1917.997179] env[61473]: DEBUG oslo_concurrency.lockutils [None req-e23bfa77-5be2-49f7-9adb-5ad1334ea2df tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.203s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.997179] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 25.804s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.997179] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: e00abe75-5243-4ab2-801b-f1d5f023b46b] During sync_power_state the instance has a pending task (deleting). Skip.
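The final record above shows the periodic power-state sync backing off: the instance's task_state is still "deleting", so reconciling its power state now would race the in-flight terminate. Roughly, with illustrative model fields (Nova's real check lives in ComputeManager._sync_power_states):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Instance:
        uuid: str
        power_state: int
        task_state: Optional[str]  # e.g. 'deleting' while terminate runs

    def query_driver_power_state_and_sync(instance, driver_power_state):
        if instance.task_state is not None:
            # Another operation owns the instance; skip, exactly as logged.
            print("During sync_power_state the instance has a pending "
                  "task (%s). Skip." % instance.task_state)
            return
        if instance.power_state != driver_power_state:
            instance.power_state = driver_power_state  # reconcile with driver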
[ 1917.997179] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "e00abe75-5243-4ab2-801b-f1d5f023b46b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.076068] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c22dd45-cceb-4637-88ae-cffcb0faf293 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.084403] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55c7d6a-8e5a-4656-91af-2ed92d5ea707 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.115797] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d77e61-8f90-4452-990a-c9792e4ba127 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.124302] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6988eff-0602-4850-be25-e1797b842500 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.137444] env[61473]: DEBUG nova.compute.provider_tree [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.147099] env[61473]: DEBUG nova.scheduler.client.report [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1918.163374] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.299s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.163866] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Start building networks asynchronously for instance.
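The inventory record logged above fully determines what placement will let this node schedule: effective capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps any single allocation (no flavor over 16 vCPUs or 180 GB of root disk here). A quick check of those numbers:

    # Effective capacity behind the inventory reported above, using the
    # standard placement formula (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 329.0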
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1918.205550] env[61473]: DEBUG nova.compute.utils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1918.206897] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1918.207413] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1918.216122] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1918.273013] env[61473]: DEBUG nova.policy [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa72838d6ec74c2ebac9d403f5ac1cf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b5fd5d032e047b8b77b2b727a03f01c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1918.281606] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Start spawning the instance on the hypervisor. 
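The failed policy check a few records up is expected and harmless: the tempest credentials carry only the member and reader roles, and network:attach_external_network is, as far as I recall Nova's defaults, an admin-only rule, so the build simply proceeds without external-network attachment. Roughly how such a check evaluates with oslo.policy (the default string below is an assumption matching the behaviour in the log, not a copy of Nova's policy file):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'is_admin:True'))

    creds = {'is_admin': False, 'roles': ['member', 'reader'],
             'project_id': '1b5fd5d032e047b8b77b2b727a03f01c'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # -> False, i.e. "Policy check ... failed" as logged, without raising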
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1918.306737] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1918.306980] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1918.307153] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1918.307337] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1918.307484] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1918.307629] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1918.307833] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1918.307992] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1918.308178] env[61473]: DEBUG nova.virt.hardware [None 
req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1918.308341] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1918.308511] env[61473]: DEBUG nova.virt.hardware [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1918.309369] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02550274-3463-4d7c-8be7-d1af058e641a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.317340] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4cf72e-a669-476d-a54e-782d5216cdd0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.587669] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Successfully created port: 63b72186-7116-41f3-84e3-02b0be88f0b6 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1919.197873] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Successfully updated port: 63b72186-7116-41f3-84e3-02b0be88f0b6 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1919.221885] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "refresh_cache-7ef374d2-9dfc-420b-84f6-8dbcc8af59db" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.222053] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "refresh_cache-7ef374d2-9dfc-420b-84f6-8dbcc8af59db" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.222209] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1919.291165] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 
tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1919.533028] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Updating instance_info_cache with network_info: [{"id": "63b72186-7116-41f3-84e3-02b0be88f0b6", "address": "fa:16:3e:0b:5f:33", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b72186-71", "ovs_interfaceid": "63b72186-7116-41f3-84e3-02b0be88f0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.547516] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "refresh_cache-7ef374d2-9dfc-420b-84f6-8dbcc8af59db" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.547828] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Instance network_info: |[{"id": "63b72186-7116-41f3-84e3-02b0be88f0b6", "address": "fa:16:3e:0b:5f:33", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b72186-71", "ovs_interfaceid": "63b72186-7116-41f3-84e3-02b0be88f0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1919.548278] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:5f:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63b72186-7116-41f3-84e3-02b0be88f0b6', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1919.556208] env[61473]: DEBUG oslo.service.loopingcall [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.556705] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1919.556941] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdbe629d-fde6-4da1-b588-2ac44d8b4c4a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.578663] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1919.578663] env[61473]: value = "task-4281713" [ 1919.578663] env[61473]: _type = "Task" [ 1919.578663] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.585137] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281713, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.686106] env[61473]: DEBUG nova.compute.manager [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Received event network-vif-plugged-63b72186-7116-41f3-84e3-02b0be88f0b6 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1919.686359] env[61473]: DEBUG oslo_concurrency.lockutils [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] Acquiring lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.686567] env[61473]: DEBUG oslo_concurrency.lockutils [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.686729] env[61473]: DEBUG oslo_concurrency.lockutils [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.686894] env[61473]: DEBUG nova.compute.manager [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] No waiting events found dispatching network-vif-plugged-63b72186-7116-41f3-84e3-02b0be88f0b6 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1919.687224] env[61473]: WARNING nova.compute.manager [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Received unexpected event network-vif-plugged-63b72186-7116-41f3-84e3-02b0be88f0b6 for instance with vm_state building and task_state spawning. [ 1919.687464] env[61473]: DEBUG nova.compute.manager [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Received event network-changed-63b72186-7116-41f3-84e3-02b0be88f0b6 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1919.687631] env[61473]: DEBUG nova.compute.manager [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Refreshing instance network info cache due to event network-changed-63b72186-7116-41f3-84e3-02b0be88f0b6.
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1919.687813] env[61473]: DEBUG oslo_concurrency.lockutils [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] Acquiring lock "refresh_cache-7ef374d2-9dfc-420b-84f6-8dbcc8af59db" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.687946] env[61473]: DEBUG oslo_concurrency.lockutils [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] Acquired lock "refresh_cache-7ef374d2-9dfc-420b-84f6-8dbcc8af59db" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.688113] env[61473]: DEBUG nova.network.neutron [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Refreshing network info cache for port 63b72186-7116-41f3-84e3-02b0be88f0b6 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1919.932121] env[61473]: DEBUG nova.network.neutron [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Updated VIF entry in instance network info cache for port 63b72186-7116-41f3-84e3-02b0be88f0b6. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1919.932499] env[61473]: DEBUG nova.network.neutron [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Updating instance_info_cache with network_info: [{"id": "63b72186-7116-41f3-84e3-02b0be88f0b6", "address": "fa:16:3e:0b:5f:33", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b72186-71", "ovs_interfaceid": "63b72186-7116-41f3-84e3-02b0be88f0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.942833] env[61473]: DEBUG oslo_concurrency.lockutils [req-e227a72b-fa66-4882-82b3-1ccbb578a292 req-314b66ee-564b-4a02-98a9-0ed7b95642aa service nova] Releasing lock "refresh_cache-7ef374d2-9dfc-420b-84f6-8dbcc8af59db" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.088896] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281713, 'name': CreateVM_Task, 'duration_secs': 0.348924} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.089107] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1920.089734] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.089899] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.090228] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1920.090465] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9577f770-a95c-4cfd-9e18-ad7441dd38c6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.094684] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 1920.094684] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]520f8cfb-cdcb-5936-3014-cce44407ed4b" [ 1920.094684] env[61473]: _type = "Task" [ 1920.094684] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.102549] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]520f8cfb-cdcb-5936-3014-cce44407ed4b, 'name': SearchDatastore_Task} progress is 0%. 
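The "[datastore2] devstack-image-cache_base/aa35b7fc-..." lock acquired above is the per-image serialization around the datastore image cache: concurrent spawns of the same image wait on one lock, the first one downloads and converts, the rest find the cache already populated. lockutils.lock is the real oslo.concurrency primitive; the cache helpers here are illustrative:

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, cache_exists, do_fetch):
        # One lock per cached image path, mirroring the lock names in the log.
        lock_name = "[datastore2] devstack-image-cache_base/%s" % image_id
        with lockutils.lock(lock_name):
            if not cache_exists(image_id):  # cf. SearchDatastore_Task above
                do_fetch(image_id)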
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.605625] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.606554] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1920.606554] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.900810] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.966864] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.967178] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.967561] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.967561] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping...
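The run of "Running periodic task ComputeManager._..." records comes from oslo.service's periodic task machinery: methods registered with a spacing are fired on that cadence by the service loop, and an individual task can still no-op on configuration, which is what produces the reclaim skip above. A rough shape, with the spacing value and config plumbing as placeholders:

    from oslo_service import periodic_task

    class ComputeManager(periodic_task.PeriodicTasks):
        def __init__(self, conf):
            super().__init__(conf)
            self.conf = conf

        @periodic_task.periodic_task(spacing=60)  # placeholder interval
        def _reclaim_queued_deletes(self, context):
            if self.conf.reclaim_instance_interval <= 0:
                # Soft delete disabled: nothing to reclaim, as logged above.
                return
            # ... otherwise reap SOFT_DELETED instances past the interval.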
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 1961.780205] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.961804] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.965391] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.965567] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.966246] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.977795] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.978029] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.978220] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.978377] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1962.979481] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d5d137-36e9-4a02-bb76-a47a476c8df6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.988512] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbadfaf-7c0d-45f5-972e-57644d413534 {{(pid=61473)
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.003401] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761a028a-6d1c-46fa-9226-b04264983f7d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.009541] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b146ae2-4537-4c65-b8b7-cd0cf7f358ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.037739] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180639MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1963.037878] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.038079] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.120153] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.120328] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.120457] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.120578] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.120698] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.120815] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.120932] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.121060] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.121177] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.121291] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.135187] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
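These per-instance records make the "Final resource view" reported just below easy to verify: ten actively managed instances, each holding {DISK_GB: 1, MEMORY_MB: 128, VCPU: 1}, plus the 512 MB reserved in the MEMORY_MB inventory; the scheduled-but-unstarted instance is skipped, as its record says. A worked check:

    alloc = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
    instances = 10         # the actively managed instances listed above
    reserved_ram_mb = 512  # 'reserved' in the MEMORY_MB inventory

    used_ram = instances * alloc['MEMORY_MB'] + reserved_ram_mb
    used_disk = instances * alloc['DISK_GB']
    used_vcpus = instances * alloc['VCPU']
    print(used_ram, used_disk, used_vcpus)  # 1792 10 10, matching the log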
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1963.135375] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1963.135548] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1963.257666] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a009d02d-8a71-4f14-aa32-c292783a1a65 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.265697] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35b0cd3-b1ab-4931-a41b-be2486db4024 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.296394] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d621a6-a0ca-48dd-b595-c163f9d1a8ec {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.303173] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d79ed83-f564-4799-8d78-42d0fa395c84 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.315786] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.324092] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1963.338016] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1963.338215] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.300s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.338911] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.966285] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.966483] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1965.966585] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 1965.986161] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.986328] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.986392] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.986469] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.986594] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.986713] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.986828] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.986948] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.987078] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.987198] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 1965.987316] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 1966.396221] env[61473]: WARNING oslo_vmware.rw_handles [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1966.396221] env[61473]: ERROR oslo_vmware.rw_handles [ 1966.396968] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1966.398722] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1966.398965] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 
tempest-AttachInterfacesTestJSON-905198513-project-member] Copying Virtual Disk [datastore2] vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/213e240a-ed21-4cf2-932d-239a8ad22f0c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1966.399409] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7614c863-d9e6-4085-b8cc-114c22ae6d6d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.408087] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 1966.408087] env[61473]: value = "task-4281714" [ 1966.408087] env[61473]: _type = "Task" [ 1966.408087] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.416424] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': task-4281714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.919259] env[61473]: DEBUG oslo_vmware.exceptions [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1966.919639] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.920052] env[61473]: ERROR nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.920052] env[61473]: Faults: ['InvalidArgument'] [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Traceback (most recent call last): [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] yield resources [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self.driver.spawn(context, instance, image_meta, [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self._fetch_image_if_missing(context, vi) [ 1966.920052] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] image_cache(vi, tmp_image_ds_loc) [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] vm_util.copy_virtual_disk( [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] session._wait_for_task(vmdk_copy_task) [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] return self.wait_for_task(task_ref) [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] return evt.wait() [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] result = hub.switch() [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1966.920450] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] return self.greenlet.switch() [ 1966.920824] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1966.920824] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self.f(*self.args, **self.kw) [ 1966.920824] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1966.920824] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] raise exceptions.translate_fault(task_info.error) [ 1966.920824] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.920824] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Faults: ['InvalidArgument'] [ 1966.920824] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] [ 1966.920824] env[61473]: INFO nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Terminating instance [ 1966.921902] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.922120] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1966.922352] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e8f4c5e-0a3a-48d6-bac1-b90b48dd5359 {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.924524] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1966.924766] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1966.925454] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771fcf6f-4b71-4e10-a988-196c421b26ec {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.932233] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1966.933180] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5990910a-743b-40e4-8102-6819263ac71f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.934533] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1966.934723] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1966.935405] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfed0597-25e8-4ec4-9f81-98affd3462a8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.940355] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Waiting for the task: (returnval){ [ 1966.940355] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]526eace8-2d3b-8b36-b983-242481b20a72" [ 1966.940355] env[61473]: _type = "Task" [ 1966.940355] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.947752] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]526eace8-2d3b-8b36-b983-242481b20a72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.009059] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1967.009296] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1967.009408] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Deleting the datastore file [datastore2] 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1967.009568] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5547dbf4-5b6d-4859-99f4-94d047543348 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.015376] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 1967.015376] env[61473]: value = "task-4281716" [ 1967.015376] env[61473]: _type = "Task" [ 1967.015376] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.022558] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': task-4281716, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.450490] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1967.450798] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Creating directory with path [datastore2] vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1967.450938] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84d0c7fb-80ea-4e61-97ee-45799cfc8c2e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.461481] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Created directory with path [datastore2] vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1967.461816] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Fetch image to [datastore2] vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1967.461936] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1967.462535] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b882b59-6cef-4e43-8c66-6cf973af943a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.468692] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c638f393-a681-461a-b57f-79264de453f7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.478430] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5622c1b0-ccbd-4fa8-bc09-675f24278a45 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.508091] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed39dcf-eaed-4903-816d-d568e245ec8c {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.513152] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bbd11df7-448a-480b-94f4-0260d4fabc86 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.522942] env[61473]: DEBUG oslo_vmware.api [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': task-4281716, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067077} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.523196] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1967.523411] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1967.523608] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1967.523787] env[61473]: INFO nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Took 0.60 seconds to destroy the instance on the hypervisor. 
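The DeleteDatastoreFile_Task lines above show the characteristic oslo.vmware pattern: submit a vCenter task, poll it ("progress is 0%."), and read back the result with a duration ("duration_secs': 0.067077} completed successfully"). The following stdlib-only Python sketch approximates that polling loop. It is an illustration, not the oslo.vmware implementation: the get_task_info callable, the dict shape it returns, and the interval/timeout values are assumptions.

import time


class TaskFailed(Exception):
    """Raised when the remote task finishes in an error state."""


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it succeeds, fails, or times out.

    get_task_info(task_id) is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # Mirrors `raise exceptions.translate_fault(task_info.error)`
            # seen in the tracebacks above: a task-level fault becomes a
            # Python exception at the call site.
            raise TaskFailed(info.get('error', 'unknown fault'))
        # Log-style progress report, e.g. "progress is 0%."
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")

Under this sketch, the InvalidArgument fault logged earlier would surface as TaskFailed at the wait_for_task call, which matches how the CopyVirtualDisk_Task failure propagates up through _wait_for_task in the tracebacks.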
[ 1967.525892] env[61473]: DEBUG nova.compute.claims [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1967.526104] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.526328] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.533989] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1967.668393] env[61473]: DEBUG oslo_vmware.rw_handles [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1967.728812] env[61473]: DEBUG oslo_vmware.rw_handles [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1967.728812] env[61473]: DEBUG oslo_vmware.rw_handles [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1967.768632] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f0b7c5-1aec-4728-a0a4-0c4216b40397 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.775812] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbabec07-bb5e-46fb-af41-e3dfe1b336c3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.806089] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f309630e-4ea2-4444-88bd-c000aaafa218 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.812804] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b7179b-0e11-4cf1-8874-d1e3428bd2b8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.825489] env[61473]: DEBUG nova.compute.provider_tree [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1967.833502] env[61473]: DEBUG nova.scheduler.client.report [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1967.848222] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.848746] env[61473]: ERROR nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.848746] env[61473]: Faults: ['InvalidArgument'] [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Traceback (most recent call last): [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 1967.848746] 
env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self.driver.spawn(context, instance, image_meta, [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self._fetch_image_if_missing(context, vi) [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] image_cache(vi, tmp_image_ds_loc) [ 1967.848746] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] vm_util.copy_virtual_disk( [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] session._wait_for_task(vmdk_copy_task) [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] return self.wait_for_task(task_ref) [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] return evt.wait() [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] result = hub.switch() [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] return self.greenlet.switch() [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1967.849114] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] self.f(*self.args, **self.kw) [ 1967.849658] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1967.849658] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] raise exceptions.translate_fault(task_info.error) [ 1967.849658] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.849658] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Faults: ['InvalidArgument'] [ 1967.849658] env[61473]: ERROR nova.compute.manager [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] [ 1967.849658] env[61473]: DEBUG nova.compute.utils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1967.850826] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Build of instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 was re-scheduled: A specified parameter was not correct: fileType [ 1967.850826] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 1967.851207] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 1967.851379] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 1967.851551] env[61473]: DEBUG nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1967.851712] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1968.148056] env[61473]: DEBUG nova.network.neutron [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.160801] env[61473]: INFO nova.compute.manager [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Took 0.31 seconds to deallocate network for instance. [ 1968.271786] env[61473]: INFO nova.scheduler.client.report [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Deleted allocations for instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 [ 1968.294962] env[61473]: DEBUG oslo_concurrency.lockutils [None req-df9001f4-ca71-42a6-916a-9a4f62d6dcbe tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 636.367s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.296360] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 440.769s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.296360] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.296614] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.296614] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.298488] env[61473]: INFO nova.compute.manager [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Terminating instance [ 1968.300081] env[61473]: DEBUG nova.compute.manager [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 1968.300285] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1968.300747] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63a879b3-80a4-4acc-a595-fdedeac5f230 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.310367] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e553c67-50ab-462c-b849-35991003cf0f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.321209] env[61473]: DEBUG nova.compute.manager [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 1968.340710] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 64dc3dee-8479-478b-87c8-2bb0ae0f99d4 could not be found. 
[ 1968.340916] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1968.341111] env[61473]: INFO nova.compute.manager [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1968.341820] env[61473]: DEBUG oslo.service.loopingcall [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1968.341820] env[61473]: DEBUG nova.compute.manager [-] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1968.341820] env[61473]: DEBUG nova.network.neutron [-] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1968.369322] env[61473]: DEBUG nova.network.neutron [-] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.372278] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.372514] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.374027] env[61473]: INFO nova.compute.claims [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1968.377051] env[61473]: INFO nova.compute.manager [-] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] Took 0.04 seconds to deallocate network for instance. 
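The 'Claim successful' line and the inventory dumps around it imply a capacity check of the form used + requested <= total * allocation_ratio - reserved per resource class. A back-of-the-envelope sketch with the inventory values logged for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 follows; the 'used' amounts are hypothetical inputs, and the real resource tracker and placement service also handle min_unit/max_unit/step_size, which this sketch omits.

# Inventory values copied from the log lines above.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 329,    'reserved': 0,   'allocation_ratio': 1.0},
}


def fits(inventory, used, request):
    """True if request fits: used + request <= total*ratio - reserved."""
    for rc, amount in request.items():
        inv = inventory[rc]
        capacity = inv['total'] * inv['allocation_ratio'] - inv['reserved']
        if used.get(rc, 0) + amount > capacity:
            return False
    return True


# An m1.nano-sized request (1 vCPU, 128 MB RAM, 1 GB root disk) against
# hypothetical current usage: VCPU capacity is 48 * 4.0 = 192, so it fits.
print(fits(INVENTORY, {'VCPU': 4, 'MEMORY_MB': 4096, 'DISK_GB': 40},
           {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}))  # -> True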
[ 1968.460011] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ac17e3c4-569e-4cf6-9f65-a78eddcb34cb tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.164s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.461018] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 76.273s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.461213] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 64dc3dee-8479-478b-87c8-2bb0ae0f99d4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1968.461463] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "64dc3dee-8479-478b-87c8-2bb0ae0f99d4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.542932] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcafdf7-189f-4423-80af-a32955462cfa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.550772] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afbb7d7-74cb-489e-9c30-9f4886f74100 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.579551] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac912294-a88f-4d14-a324-55300ae13471 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.586126] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5209433b-8407-4bdc-8631-ecd395682c91 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.598608] env[61473]: DEBUG nova.compute.provider_tree [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.606841] env[61473]: DEBUG nova.scheduler.client.report [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 
'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1968.620156] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.248s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.620607] env[61473]: DEBUG nova.compute.manager [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 1968.653224] env[61473]: DEBUG nova.compute.utils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1968.654566] env[61473]: DEBUG nova.compute.manager [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1968.654739] env[61473]: DEBUG nova.network.neutron [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1968.664646] env[61473]: DEBUG nova.compute.manager [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 1968.736365] env[61473]: DEBUG nova.policy [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eda5c2e486864d80b4b3f1415a181dcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75fb9e80f03749519e953a48c30915c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 1968.743727] env[61473]: DEBUG nova.compute.manager [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 1968.769161] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1968.769414] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1968.769573] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1968.769755] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1968.769903] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1968.770082] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1968.770306] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1968.770527] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1968.770721] env[61473]: DEBUG 
nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1968.770921] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1968.771168] env[61473]: DEBUG nova.virt.hardware [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1968.772012] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ce1ccc-6025-4e25-81aa-65e28ad1e486 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.781553] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50371dda-52f7-4ac8-8fa8-3f410c72d1ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.122514] env[61473]: DEBUG nova.network.neutron [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Successfully created port: b0183659-4331-40f8-abd0-9816ae6ad3e6 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1969.762652] env[61473]: DEBUG nova.network.neutron [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Successfully updated port: b0183659-4331-40f8-abd0-9816ae6ad3e6 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1969.776676] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "refresh_cache-2f33fc61-3ea2-4818-918a-76cdae031a79" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.776819] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "refresh_cache-2f33fc61-3ea2-4818-918a-76cdae031a79" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.776965] env[61473]: DEBUG nova.network.neutron [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1969.839049] env[61473]: DEBUG nova.network.neutron [None 
req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1970.039367] env[61473]: DEBUG nova.network.neutron [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Updating instance_info_cache with network_info: [{"id": "b0183659-4331-40f8-abd0-9816ae6ad3e6", "address": "fa:16:3e:dc:08:f6", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0183659-43", "ovs_interfaceid": "b0183659-4331-40f8-abd0-9816ae6ad3e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.052931] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Releasing lock "refresh_cache-2f33fc61-3ea2-4818-918a-76cdae031a79" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.053244] env[61473]: DEBUG nova.compute.manager [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Instance network_info: |[{"id": "b0183659-4331-40f8-abd0-9816ae6ad3e6", "address": "fa:16:3e:dc:08:f6", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0183659-43", "ovs_interfaceid": 
"b0183659-4331-40f8-abd0-9816ae6ad3e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 1970.053702] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:08:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0183659-4331-40f8-abd0-9816ae6ad3e6', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1970.061381] env[61473]: DEBUG oslo.service.loopingcall [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1970.061840] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1970.062080] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16ac4560-4bb7-4695-b9b4-75da08ee6ac4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.081941] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1970.081941] env[61473]: value = "task-4281717" [ 1970.081941] env[61473]: _type = "Task" [ 1970.081941] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.089338] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281717, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.214353] env[61473]: DEBUG nova.compute.manager [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Received event network-vif-plugged-b0183659-4331-40f8-abd0-9816ae6ad3e6 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1970.214560] env[61473]: DEBUG oslo_concurrency.lockutils [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] Acquiring lock "2f33fc61-3ea2-4818-918a-76cdae031a79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1970.214772] env[61473]: DEBUG oslo_concurrency.lockutils [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] Lock "2f33fc61-3ea2-4818-918a-76cdae031a79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.214939] env[61473]: DEBUG oslo_concurrency.lockutils [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] Lock "2f33fc61-3ea2-4818-918a-76cdae031a79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.215112] env[61473]: DEBUG nova.compute.manager [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] No waiting events found dispatching network-vif-plugged-b0183659-4331-40f8-abd0-9816ae6ad3e6 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1970.215278] env[61473]: WARNING nova.compute.manager [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Received unexpected event network-vif-plugged-b0183659-4331-40f8-abd0-9816ae6ad3e6 for instance with vm_state building and task_state spawning. [ 1970.215682] env[61473]: DEBUG nova.compute.manager [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Received event network-changed-b0183659-4331-40f8-abd0-9816ae6ad3e6 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 1970.215927] env[61473]: DEBUG nova.compute.manager [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Refreshing instance network info cache due to event network-changed-b0183659-4331-40f8-abd0-9816ae6ad3e6. 
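The network-vif-plugged/network-changed handling above (pop_instance_event, the "Received unexpected event" warning) follows a waiter-registry pattern: an event is keyed per instance as "<name>-<tag>". A toy sketch under assumed names, using threading.Event in place of Nova's eventlet primitives:

```python
import threading

_events = {}   # {instance_uuid: {"<event-name>-<tag>": threading.Event}}

def prepare_event(instance_uuid, name):
    """A waiter registers interest before triggering the external action."""
    _events.setdefault(instance_uuid, {})[name] = threading.Event()

def pop_instance_event(instance_uuid, name):
    """An arriving external event signals its waiter, or warns if none exists."""
    ev = _events.get(instance_uuid, {}).pop(name, None)
    if ev is None:
        print("Received unexpected event %s" % name)
    else:
        ev.set()

prepare_event("2f33fc61", "network-vif-plugged-b0183659")
pop_instance_event("2f33fc61", "network-vif-plugged-b0183659")  # signals the waiter
pop_instance_event("2f33fc61", "network-changed-b0183659")      # no waiter -> warning
```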
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 1970.216318] env[61473]: DEBUG oslo_concurrency.lockutils [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] Acquiring lock "refresh_cache-2f33fc61-3ea2-4818-918a-76cdae031a79" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.216499] env[61473]: DEBUG oslo_concurrency.lockutils [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] Acquired lock "refresh_cache-2f33fc61-3ea2-4818-918a-76cdae031a79" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.216705] env[61473]: DEBUG nova.network.neutron [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Refreshing network info cache for port b0183659-4331-40f8-abd0-9816ae6ad3e6 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1970.489621] env[61473]: DEBUG nova.network.neutron [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Updated VIF entry in instance network info cache for port b0183659-4331-40f8-abd0-9816ae6ad3e6. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1970.489983] env[61473]: DEBUG nova.network.neutron [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Updating instance_info_cache with network_info: [{"id": "b0183659-4331-40f8-abd0-9816ae6ad3e6", "address": "fa:16:3e:dc:08:f6", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0183659-43", "ovs_interfaceid": "b0183659-4331-40f8-abd0-9816ae6ad3e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.499378] env[61473]: DEBUG oslo_concurrency.lockutils [req-84b3738c-4daf-4f80-9454-fc5b6c7504ea req-87e7d60b-9f09-4112-8e0a-2a8d9bfb4d73 service nova] Releasing lock "refresh_cache-2f33fc61-3ea2-4818-918a-76cdae031a79" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.591994] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281717, 'name': CreateVM_Task, 'duration_secs': 0.341884} completed successfully. 
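The instance_info_cache entries dumped above are plain JSON. A short sketch of reading one, with a hand-trimmed sample that keeps only the fields used here (values copied from the log):

```python
import json

network_info = json.loads('''[{
  "id": "b0183659-4331-40f8-abd0-9816ae6ad3e6",
  "address": "fa:16:3e:dc:08:f6",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.5", "type": "fixed"}]}]},
  "type": "ovs",
  "devname": "tapb0183659-43"}]''')

for vif in network_info:
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], ips)   # port id, MAC, fixed IPs
```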
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.592171] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1970.598985] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.599167] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.599510] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1970.599743] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aae1576-cf2f-4337-be1a-0d6af34e051d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.603976] env[61473]: DEBUG oslo_vmware.api [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for the task: (returnval){ [ 1970.603976] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52fcf852-edad-af36-726c-4f0f24305d60" [ 1970.603976] env[61473]: _type = "Task" [ 1970.603976] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.611313] env[61473]: DEBUG oslo_vmware.api [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52fcf852-edad-af36-726c-4f0f24305d60, 'name': SearchDatastore_Task} progress is 0%. 
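The lock names above show how concurrent image-cache operations are serialized: the lock is keyed on the datastore path of the cached image, so only one request fetches or copies a given image at a time. A minimal sketch with oslo.concurrency (the path is copied from the log; the body is a placeholder):

```python
from oslo_concurrency import lockutils

image_ds_path = ("[datastore2] devstack-image-cache_base/"
                 "aa35b7fc-44b5-479c-b6c8-60930c581f0d")

# Greenthreads racing to fetch the same image block on this name until
# the holder releases it.
with lockutils.lock(image_ds_path):
    pass  # fetch or copy the cached image here
```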
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.114440] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.114764] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1971.114905] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.982631] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.575612] env[61473]: WARNING oslo_vmware.rw_handles [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.575612] env[61473]: ERROR oslo_vmware.rw_handles [ 2017.575612] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to 
vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2017.577738] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2017.577983] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Copying Virtual Disk [datastore2] vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/f8348a3d-e30e-4198-af2a-c5d89a825efc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2017.578356] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8a2e71f-7307-4426-a487-0c42b1855e4a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.585963] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Waiting for the task: (returnval){ [ 2017.585963] env[61473]: value = "task-4281718" [ 2017.585963] env[61473]: _type = "Task" [ 2017.585963] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.593834] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Task: {'id': task-4281718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.965869] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.095262] env[61473]: DEBUG oslo_vmware.exceptions [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2018.095565] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.096145] env[61473]: ERROR nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.096145] env[61473]: Faults: ['InvalidArgument'] [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Traceback (most recent call last): [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] yield resources [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self.driver.spawn(context, instance, image_meta, [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self._fetch_image_if_missing(context, vi) [ 2018.096145] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] image_cache(vi, tmp_image_ds_loc) [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] vm_util.copy_virtual_disk( [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] session._wait_for_task(vmdk_copy_task) [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] return self.wait_for_task(task_ref) [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] return evt.wait() [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] result = hub.switch() [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2018.096530] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] return self.greenlet.switch() [ 2018.096916] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2018.096916] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self.f(*self.args, **self.kw) [ 2018.096916] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2018.096916] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] raise exceptions.translate_fault(task_info.error) [ 2018.096916] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.096916] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Faults: ['InvalidArgument'] [ 2018.096916] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] [ 2018.096916] env[61473]: INFO nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Terminating instance [ 2018.098837] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.098837] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.098837] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dd4ef29-1412-4486-ad58-513c9f096cd2 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.100919] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2018.101124] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2018.101823] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be029dc8-e1d5-4d75-a643-9a5551ad8f8e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.108112] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2018.108318] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbb97bcb-99ce-4a9b-ab8a-eb87d9631856 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.110600] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.110774] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2018.111431] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60d9192a-1e6a-48f9-a211-9529e55dae3f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.116112] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for the task: (returnval){ [ 2018.116112] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]529aaafb-7b0b-8768-c061-2fc2be419371" [ 2018.116112] env[61473]: _type = "Task" [ 2018.116112] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.129395] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]529aaafb-7b0b-8768-c061-2fc2be419371, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.173657] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2018.173950] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2018.174182] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Deleting the datastore file [datastore2] bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.174458] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54873edb-63f8-4803-ad43-a570405228de {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.181087] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Waiting for the task: (returnval){ [ 2018.181087] env[61473]: value = "task-4281720" [ 2018.181087] env[61473]: _type = "Task" [ 2018.181087] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.188517] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Task: {'id': task-4281720, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.627174] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2018.627482] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating directory with path [datastore2] vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.627710] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c71e1151-1a3a-4b19-95d7-6acb3fdea054 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.638728] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Created directory with path [datastore2] vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.638924] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Fetch image to [datastore2] vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2018.639116] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2018.639831] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8870dea0-ac3f-4f59-a98e-9975ae00afb3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.646225] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b9567f-a3de-4019-9565-caa11dab8830 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.654830] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98dc2705-347b-4ee5-b910-9a4b6b057d51 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.688155] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-01ac514a-6cb8-4cf8-a456-b106462aa772 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.695482] env[61473]: DEBUG oslo_vmware.api [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Task: {'id': task-4281720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067105} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.696823] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.697024] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2018.697200] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2018.697405] env[61473]: INFO nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Took 0.60 seconds to destroy the instance on the hypervisor. 
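The failure-and-cleanup arc above (CopyVirtualDisk_Task fault, terminate, reschedule) hinges on translate_fault turning the task error into a typed exception at _poll_task. A small sketch, assuming oslo.vmware's VimFaultException(fault_list, message) constructor:

```python
from oslo_vmware import exceptions as vexc

err = vexc.VimFaultException(["InvalidArgument"],
                             "A specified parameter was not correct: fileType")
try:
    raise err   # what _poll_task effectively does with task_info.error
except vexc.VimFaultException as e:
    # Callers can branch on the vSphere fault names carried in e.fault_list.
    print("spawn failed: %s (faults: %s)" % (e, e.fault_list))
```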
[ 2018.699428] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8d97795a-7363-4c46-be50-a8811b9808c4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.702806] env[61473]: DEBUG nova.compute.claims [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2018.702985] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.703235] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.719898] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2018.774288] env[61473]: DEBUG oslo_vmware.rw_handles [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2018.832549] env[61473]: DEBUG oslo_vmware.rw_handles [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2018.832722] env[61473]: DEBUG oslo_vmware.rw_handles [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
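An illustrative sketch (placeholder path, tiny payload) of what the rw_handles write path above does: PUT the image bytes to the datastore folder URL, then read the response when the handle is closed. The RemoteDisconnected warning logged earlier in this section comes from that final getresponse() when the server hangs up without replying:

```python
import http.client

conn = http.client.HTTPSConnection("esx7c1n2.openstack.eu-de-1.cloud.sap", 443)
body = b"\x00" * 4096                      # stand-in for the image iterator chunks
conn.putrequest("PUT", "/folder/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2")
conn.putheader("Content-Length", str(len(body)))
conn.endheaders()
conn.send(body)
try:
    resp = conn.getresponse()              # raises RemoteDisconnected on a silent hang-up
    print(resp.status, resp.reason)
finally:
    conn.close()
```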
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2018.932781] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af485c5-90ce-4df3-a3cc-9901643b6b14 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.940596] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0db4bf-8249-4302-9f44-25fcfc2f6038 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.969647] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae47d0a5-a274-4ee8-9182-eea099a2a6ab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.976330] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a1ebe1-22b7-405d-b084-7b10c90317d6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.990615] env[61473]: DEBUG nova.compute.provider_tree [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.999550] env[61473]: DEBUG nova.scheduler.client.report [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2019.014420] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.311s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.014927] env[61473]: ERROR nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.014927] env[61473]: Faults: ['InvalidArgument'] [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Traceback (most recent call last): [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2019.014927] env[61473]: ERROR nova.compute.manager 
[instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self.driver.spawn(context, instance, image_meta, [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self._fetch_image_if_missing(context, vi) [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] image_cache(vi, tmp_image_ds_loc) [ 2019.014927] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] vm_util.copy_virtual_disk( [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] session._wait_for_task(vmdk_copy_task) [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] return self.wait_for_task(task_ref) [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] return evt.wait() [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] result = hub.switch() [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] return self.greenlet.switch() [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2019.015362] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] self.f(*self.args, **self.kw) [ 2019.015787] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2019.015787] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] raise exceptions.translate_fault(task_info.error) [ 2019.015787] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.015787] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Faults: ['InvalidArgument'] [ 2019.015787] env[61473]: ERROR nova.compute.manager [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] [ 2019.015787] env[61473]: DEBUG nova.compute.utils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2019.016964] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Build of instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff was re-scheduled: A specified parameter was not correct: fileType [ 2019.016964] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2019.017365] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2019.017538] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2019.017718] env[61473]: DEBUG nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2019.017885] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2019.343884] env[61473]: DEBUG nova.network.neutron [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.355931] env[61473]: INFO nova.compute.manager [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Took 0.34 seconds to deallocate network for instance. [ 2019.453470] env[61473]: INFO nova.scheduler.client.report [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Deleted allocations for instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff [ 2019.476913] env[61473]: DEBUG oslo_concurrency.lockutils [None req-75a0ac0c-af93-4964-a160-d087ed678fd9 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.132s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.477195] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.389s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.477461] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Acquiring lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.477686] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.477855] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.479931] env[61473]: INFO nova.compute.manager [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Terminating instance [ 2019.482993] env[61473]: DEBUG nova.compute.manager [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2019.483228] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2019.483515] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91f15f20-e5ed-434b-8666-7a7f6ed1982d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.493169] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99e329a-4c4d-485b-92b0-5f7d89e8ec0c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.521108] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff could not be found. [ 2019.521313] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2019.521493] env[61473]: INFO nova.compute.manager [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2019.521743] env[61473]: DEBUG oslo.service.loopingcall [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
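The "Waiting for function ... to return" lines above come from oslo.service looping calls: the callable is re-invoked on an interval until it raises LoopingCallDone, and the caller blocks on .wait() for the return value. A runnable sketch (interval and retry count are illustrative):

```python
from oslo_service import loopingcall

attempts = {"n": 0}

def _deallocate_with_retries():
    attempts["n"] += 1
    if attempts["n"] >= 3:                     # pretend the third try succeeds
        raise loopingcall.LoopingCallDone(True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()      # True once the call finishes
print(result)
```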
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.521953] env[61473]: DEBUG nova.compute.manager [-] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2019.522057] env[61473]: DEBUG nova.network.neutron [-] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2019.545165] env[61473]: DEBUG nova.network.neutron [-] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.552722] env[61473]: INFO nova.compute.manager [-] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] Took 0.03 seconds to deallocate network for instance. [ 2019.631228] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3b13be17-29b8-4c98-a105-fe0d97b308a0 tempest-AttachVolumeTestJSON-790514961 tempest-AttachVolumeTestJSON-790514961-project-member] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.154s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.632071] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 127.444s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.632263] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff] During sync_power_state the instance has a pending task (deleting). Skip. [ 2019.633026] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "bdb3ab18-5b02-4570-a3d5-ce07dbd2dbff" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.966648] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.966944] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
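The periodic entries above (_reclaim_queued_deletes, _check_instance_build_time, _poll_rescued_instances, ...) are driven by oslo.service's periodic-task machinery: decorated methods are collected on the class and invoked by run_periodic_tasks() when due. A minimal sketch; the option registration and spacing are illustrative, and the "skipping" case is just an early return on the config value:

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF.register_opts([cfg.IntOpt("reclaim_instance_interval", default=0)])

class Manager(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        if CONF.reclaim_instance_interval <= 0:
            return  # logged as "CONF.reclaim_instance_interval <= 0, skipping..."

m = Manager(CONF)
m.run_periodic_tasks(context=None)   # invokes whichever tasks are due
```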
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 2021.962421] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.966079] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.966374] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.159309] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3e96d24d-dda0-4e28-a427-235d693213da tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "2f33fc61-3ea2-4818-918a-76cdae031a79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.967068] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2023.967068] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2023.978246] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.978459] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.978629] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.978781] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
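
Editor's note: the burst of "Running periodic task ComputeManager._poll_*" entries above comes from oslo.service's periodic-task machinery: methods decorated with @periodic_task.periodic_task are collected by the PeriodicTasks base class, and the service loop fires them through run_periodic_tasks(). A minimal sketch using only the public oslo.service API:

```python
from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    """Gathers decorated methods, as nova's ComputeManager does."""

    @periodic_task.periodic_task(spacing=10)  # run at most every 10 seconds
    def _poll_something(self, context):
        print("periodic tick")

mgr = Manager(cfg.CONF)
mgr.run_periodic_tasks(context=None)  # the service loop calls this repeatedly
```
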
[ 2023.979917] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf74f41-c611-48cc-91b4-43f1f148ac58 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.988486] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ddb79b-da72-4d23-b159-bad49dc46bce {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.001960] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e14d38-82b6-4b76-9120-df6f2ddb8bf0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.008097] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7caf9406-8db1-40ca-9093-cfa2a47a8c58 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.037010] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180637MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2024.037158] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.037359] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.104751] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.104907] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.105053] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.105182] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.105301] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.105453] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.105607] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.105730] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.105845] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2024.106191] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2024.106191] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=183GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2024.218346] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d85045d-f6d5-4afb-a590-3446b1adc7bd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.225758] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7144fcc1-bb9b-44ee-b5f3-82547259045d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.255566] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c230ff58-8249-42be-a3d0-233964449eda {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.262368] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41298e39-e332-4546-b56f-07b15b3e7ee7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.275139] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2024.283165] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2024.298716] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2024.298898] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.262s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
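
Editor's note: the "Final resource view" figures above follow directly from the nine per-instance allocations listed before it (1 vCPU / 128 MB / 1 GB each) plus the 512 MB of reserved host memory in the inventory data; a quick check:

```python
# Reproduce the resource tracker's usage figures from the nine claims above.
instances = 9
print(512 + instances * 128)  # used_ram   -> 1664 (MB; 512 MB host-reserved)
print(instances * 1)          # used_disk  -> 9 (GB)
print(instances * 1)          # used_vcpus -> 9 (of 48 usable)
```
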
[ 2027.299138] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.299526] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2027.299526] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2027.320179] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.320410] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.320504] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.320618] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.320781] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.320872] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.320979] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.321114] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.321236] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building.
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2027.321355] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2027.321828] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.590758] env[61473]: WARNING oslo_vmware.rw_handles [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2065.590758] env[61473]: ERROR oslo_vmware.rw_handles [ 2065.591727] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2065.593292] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2065.593569] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Copying Virtual Disk [datastore2] vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/00631aef-fbcb-40d3-9237-10b25e30a48e/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) 
copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2065.593822] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e87513e-3edb-48df-9ded-a0622a3b31a4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.602573] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for the task: (returnval){ [ 2065.602573] env[61473]: value = "task-4281721" [ 2065.602573] env[61473]: _type = "Task" [ 2065.602573] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.610641] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': task-4281721, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.874850] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "bf13952e-d219-4c77-9a73-ada311eeb053" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.875114] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "bf13952e-d219-4c77-9a73-ada311eeb053" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
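
Editor's note: the "Waiting for the task: (returnval)…" / "progress is 0%" exchange above is oslo.vmware's task-polling loop: invoke_api() returns a Task managed-object reference and wait_for_task() polls it until it completes or raises. A rough sketch of that pattern; the endpoint, credentials and datastore paths below are placeholders, not values from this log:

```python
from oslo_vmware import api

# Placeholder endpoint/credentials; a real session needs a reachable vCenter.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                          sourceName='[datastore2] tmp/tmp-sparse.vmdk',
                          destName='[datastore2] cache/image.vmdk')
session.wait_for_task(task)  # polls as in the log; raises on task error
```
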
[ 2065.886197] env[61473]: DEBUG nova.compute.manager [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 2065.938592] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.938889] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.940326] env[61473]: INFO nova.compute.claims [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2066.091036] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4507bccf-48aa-4107-a097-3afe7b665695 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.098614] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8776a4d2-c331-4613-8f84-24cfe835090e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.129799] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771e4466-d3c3-440d-8f42-3402d4e9ae0f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.134712] env[61473]: DEBUG oslo_vmware.exceptions [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Fault InvalidArgument not matched.
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2066.135252] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.135822] env[61473]: ERROR nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.135822] env[61473]: Faults: ['InvalidArgument'] [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Traceback (most recent call last): [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] yield resources [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self.driver.spawn(context, instance, image_meta, [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self._fetch_image_if_missing(context, vi) [ 2066.135822] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] image_cache(vi, tmp_image_ds_loc) [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] vm_util.copy_virtual_disk( [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] session._wait_for_task(vmdk_copy_task) [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] return self.wait_for_task(task_ref) [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] return evt.wait() [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] result = hub.switch() [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2066.136269] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] return self.greenlet.switch() [ 2066.136716] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2066.136716] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self.f(*self.args, **self.kw) [ 2066.136716] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2066.136716] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] raise exceptions.translate_fault(task_info.error) [ 2066.136716] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.136716] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Faults: ['InvalidArgument'] [ 2066.136716] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] [ 2066.136716] env[61473]: INFO nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Terminating instance [ 2066.139526] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.139734] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2066.140575] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 
tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2066.140788] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2066.140967] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7a86eea-6d65-4e40-b752-e944b9f1b687 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.143435] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09bcac0-ef10-42fb-8f20-c43114c2adf4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.147657] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de57f17d-02e7-47f6-884c-6b8ff8eda8d2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.898412] env[61473]: DEBUG nova.compute.provider_tree [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2066.902165] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
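
Editor's note: the traceback further above ends in _poll_task raising exceptions.translate_fault(task_info.error), so the vCenter fault 'InvalidArgument' ("A specified parameter was not correct: fileType") reaches Nova as an oslo.vmware VimFaultException whose fault_list carries the fault names the log prints. A sketch of how a caller sees it (the wrapper function is illustrative, not Nova's code):

```python
from oslo_vmware import exceptions as vexc

def copy_disk_checked(session, vmdk_copy_task):
    try:
        session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as e:
        # e.fault_list holds fault names such as 'InvalidArgument'.
        if 'InvalidArgument' in e.fault_list:
            raise  # Nova aborts the claim and re-schedules the build
        raise
```
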
[ 2066.902370] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2066.903696] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0df6dbe-fc8a-4c1f-a0e8-9dc4158cd61b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.907990] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2066.909024] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb984c4f-8ece-4fcd-b853-f747eb542719 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.911153] env[61473]: DEBUG nova.scheduler.client.report [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2066.915599] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Waiting for the task: (returnval){ [ 2066.915599] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]523abee6-d4e9-5ff1-b52d-c0d620fd0de2" [ 2066.915599] env[61473]: _type = "Task" [ 2066.915599] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.926150] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]523abee6-d4e9-5ff1-b52d-c0d620fd0de2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.926798] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.988s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.927305] env[61473]: DEBUG nova.compute.manager [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Start building networks asynchronously for instance.
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 2066.963620] env[61473]: DEBUG nova.compute.utils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2066.965043] env[61473]: DEBUG nova.compute.manager [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 2066.965229] env[61473]: DEBUG nova.network.neutron [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2066.974348] env[61473]: DEBUG nova.compute.manager [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 2066.985011] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2066.985250] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2066.985413] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Deleting the datastore file [datastore2] c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2066.985652] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7008058-9581-44c7-a578-319df41be6d1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.991877] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for the task: (returnval){ [ 2066.991877] env[61473]: value = "task-4281723" [ 2066.991877] env[61473]: _type = "Task" [ 2066.991877] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.999642] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': task-4281723, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.030451] env[61473]: DEBUG nova.policy [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '679a463d26e64b3c8b61617fe97abf2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '930bd6995c2a4a6d8b2f760d584e21bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 2067.036022] env[61473]: DEBUG nova.compute.manager [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Start spawning the instance on the hypervisor. {{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 2067.063878] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2067.064136] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2067.064298] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2067.064477] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2067.064623] env[61473]: DEBUG 
nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2067.064770] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2067.064970] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2067.065165] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2067.065332] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2067.065493] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2067.065760] env[61473]: DEBUG nova.virt.hardware [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2067.066700] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863e38f7-1302-4316-a106-9c45bea7c8b4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.074381] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3657cf1b-8859-472d-a763-7f9b723a44c7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.428864] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
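
Editor's note: "Preparing fetch location" above is the cache-miss branch of _fetch_image_if_missing: the image is downloaded once to a vmware_temp/<uuid>/tmp-sparse.vmdk path and then copied into devstack-image-cache_base, serialized by the per-image datastore lock seen earlier in the log. A condensed, hypothetical sketch of that shape (the helper callables are invented, not Nova's API):

```python
from oslo_concurrency import lockutils

def fetch_image_if_missing(cache_path, exists_on_datastore, fetch_to_temp,
                           cache_from_temp):
    # Serialize on the cached-image path, e.g.
    # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk",
    # so concurrent builds download the image only once.
    with lockutils.lock(cache_path):
        if not exists_on_datastore(cache_path):
            tmp = fetch_to_temp()             # HTTP write to vmware_temp/...
            cache_from_temp(tmp, cache_path)  # CopyVirtualDisk into the cache
```
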
[ 2067.429112] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Creating directory with path [datastore2] vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2067.429359] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eaa25c9a-c37a-4cbb-9ebd-698f9fab4d8f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.440930] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Created directory with path [datastore2] vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2067.441140] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Fetch image to [datastore2] vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2067.441312] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2067.442061] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65194c9b-d1f7-4a6a-b4a2-043a7daae8ea {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.448594] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db85cc9-ef32-4291-83fd-2e1c83b82d29 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.457225] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0811bda-8060-4189-ba40-c86308b0d3d2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.488615] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b5aea5-e430-491e-b1eb-3334cceb42fa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.496972] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5d7501fe-e864-480f-b3dd-7ab67f067197 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.503318] env[61473]: DEBUG oslo_vmware.api [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': task-4281723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072892} completed successfully.
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.503550] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.503776] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2067.503898] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2067.504077] env[61473]: INFO nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Took 1.36 seconds to destroy the instance on the hypervisor. [ 2067.506907] env[61473]: DEBUG nova.compute.claims [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2067.507669] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.507669] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.517046] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2067.521226] env[61473]: DEBUG nova.network.neutron [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Successfully created port: 397309a1-39af-4d03-be92-a25ff37d44f9 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2067.584246] env[61473]: DEBUG oslo_vmware.rw_handles [None 
req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2067.642422] env[61473]: DEBUG oslo_vmware.rw_handles [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2067.642554] env[61473]: DEBUG oslo_vmware.rw_handles [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2067.780034] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65732a43-e842-473b-97c9-cae61617886b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.787306] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9bddac-4d30-4935-bb6e-1123f6519628 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.821067] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c388abd1-0f57-42b4-9358-dedd9df22f11 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.828565] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d478fe7d-ce43-414d-8d49-f7dd36a92f48 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.842714] env[61473]: DEBUG nova.compute.provider_tree [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.854872] env[61473]: DEBUG nova.scheduler.client.report [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2067.869047] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.362s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.869613] env[61473]: ERROR nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.869613] env[61473]: Faults: ['InvalidArgument'] [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Traceback (most recent call last): [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self.driver.spawn(context, instance, image_meta, [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self._fetch_image_if_missing(context, vi) [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] image_cache(vi, tmp_image_ds_loc) [ 2067.869613] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] vm_util.copy_virtual_disk( [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] session._wait_for_task(vmdk_copy_task) [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] return self.wait_for_task(task_ref) [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] return evt.wait() [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] result = hub.switch() [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] return self.greenlet.switch() [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2067.869997] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] self.f(*self.args, **self.kw) [ 2067.870656] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2067.870656] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] raise exceptions.translate_fault(task_info.error) [ 2067.870656] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.870656] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Faults: ['InvalidArgument'] [ 2067.870656] env[61473]: ERROR nova.compute.manager [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] [ 2067.870656] env[61473]: DEBUG nova.compute.utils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2067.871761] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Build of instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb was re-scheduled: A specified parameter was not correct: fileType [ 2067.871761] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2067.872160] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2067.872330] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be 
unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2067.872554] env[61473]: DEBUG nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2067.872726] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2068.255163] env[61473]: DEBUG nova.compute.manager [req-b6836eb5-1d23-420f-903b-61d108604031 req-49312331-7323-4211-b5cf-7ac54e07f63d service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Received event network-vif-plugged-397309a1-39af-4d03-be92-a25ff37d44f9 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2068.255417] env[61473]: DEBUG oslo_concurrency.lockutils [req-b6836eb5-1d23-420f-903b-61d108604031 req-49312331-7323-4211-b5cf-7ac54e07f63d service nova] Acquiring lock "bf13952e-d219-4c77-9a73-ada311eeb053-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.255597] env[61473]: DEBUG oslo_concurrency.lockutils [req-b6836eb5-1d23-420f-903b-61d108604031 req-49312331-7323-4211-b5cf-7ac54e07f63d service nova] Lock "bf13952e-d219-4c77-9a73-ada311eeb053-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.255767] env[61473]: DEBUG oslo_concurrency.lockutils [req-b6836eb5-1d23-420f-903b-61d108604031 req-49312331-7323-4211-b5cf-7ac54e07f63d service nova] Lock "bf13952e-d219-4c77-9a73-ada311eeb053-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.255930] env[61473]: DEBUG nova.compute.manager [req-b6836eb5-1d23-420f-903b-61d108604031 req-49312331-7323-4211-b5cf-7ac54e07f63d service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] No waiting events found dispatching network-vif-plugged-397309a1-39af-4d03-be92-a25ff37d44f9 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2068.256103] env[61473]: WARNING nova.compute.manager [req-b6836eb5-1d23-420f-903b-61d108604031 req-49312331-7323-4211-b5cf-7ac54e07f63d service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Received unexpected event network-vif-plugged-397309a1-39af-4d03-be92-a25ff37d44f9 for instance with vm_state building and task_state spawning. 
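The traceback above ends inside oslo.vmware's task-polling machinery: wait_for_task() blocks on an eventlet Event while _poll_task checks TaskInfo in a looping call, and when vCenter reports a task error the fault is translated and re-raised as VimFaultException (here "A specified parameter was not correct: fileType", faults ['InvalidArgument']). A minimal sketch of that call pattern using oslo.vmware's public session API follows; the host, credentials, and disk paths are placeholders, not values from this log:

```python
# Sketch only: the submit-task / wait-for-task pattern seen in the
# traceback above. Host, credentials and paths are placeholders.
from oslo_vmware import api
from oslo_vmware import exceptions

session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=3, task_poll_interval=0.5)
try:
    # *_Task vSphere methods return a task moref; wait_for_task() polls
    # TaskInfo in a looping call (the _poll_task frame above) and raises
    # a translated fault if the task errors out.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore2] tmp/src.vmdk',
        destName='[datastore2] cache/dst.vmdk')
    session.wait_for_task(task)
except exceptions.VimFaultException as e:
    # For the failure above: str(e) carries the vCenter message
    # ("A specified parameter was not correct: fileType") and
    # e.fault_list == ['InvalidArgument'].
    print(e.fault_list, str(e))
```

Nova's vm_util.copy_virtual_disk follows the same shape, which is why the failure surfaces in _poll_task rather than at the CopyVirtualDisk_Task call site, and why the compute manager only sees the translated VimFaultException when it aborts the claim and re-schedules the build.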
[ 2068.346680] env[61473]: DEBUG nova.network.neutron [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.359026] env[61473]: INFO nova.compute.manager [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Took 0.48 seconds to deallocate network for instance. [ 2068.383631] env[61473]: DEBUG nova.network.neutron [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Successfully updated port: 397309a1-39af-4d03-be92-a25ff37d44f9 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2068.398871] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "refresh_cache-bf13952e-d219-4c77-9a73-ada311eeb053" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.399046] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "refresh_cache-bf13952e-d219-4c77-9a73-ada311eeb053" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.399201] env[61473]: DEBUG nova.network.neutron [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2068.447702] env[61473]: DEBUG nova.network.neutron [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2068.487415] env[61473]: INFO nova.scheduler.client.report [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Deleted allocations for instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb [ 2068.510649] env[61473]: DEBUG oslo_concurrency.lockutils [None req-470cbc93-7427-476c-b3c1-b054c9f6635b tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.089s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.510907] env[61473]: DEBUG oslo_concurrency.lockutils [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 440.816s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.511138] env[61473]: DEBUG oslo_concurrency.lockutils [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.511341] env[61473]: DEBUG oslo_concurrency.lockutils [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.511504] env[61473]: DEBUG oslo_concurrency.lockutils [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.515040] env[61473]: INFO nova.compute.manager [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Terminating instance [ 2068.516821] env[61473]: DEBUG nova.compute.manager [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2068.517030] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2068.517346] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5380cbf-e27d-4771-8003-6ea381d5be48 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.529374] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7ed739-e503-4707-8079-b42efacbea28 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.557593] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb could not be found. [ 2068.557808] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2068.557988] env[61473]: INFO nova.compute.manager [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2068.558244] env[61473]: DEBUG oslo.service.loopingcall [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.558517] env[61473]: DEBUG nova.compute.manager [-] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2068.558615] env[61473]: DEBUG nova.network.neutron [-] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2068.598353] env[61473]: DEBUG nova.network.neutron [-] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.606409] env[61473]: INFO nova.compute.manager [-] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] Took 0.05 seconds to deallocate network for instance. 
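Each step in the terminate path above is bracketed by lockutils DEBUG lines ("Acquiring lock ...", "... acquired ... waited", '... "released" ... held') emitted by oslo.concurrency. A minimal sketch of the pattern behind those lines, with illustrative function names:

```python
# Sketch of the oslo.concurrency pattern that produces the
# Acquiring/acquired/released DEBUG lines throughout this log.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_tracker():
    # Body runs with the process-local "compute_resources" lock held;
    # the wrapper logs how long the caller waited for and held it.
    pass

# The same helper works as a context manager, e.g. for the per-instance
# event locks ("<uuid>-events") seen above:
with lockutils.lock('c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb-events'):
    pass  # pop or clear instance events inside the critical section
```

The waited/held durations in the log (e.g. the per-instance build lock released after 637.089s at 2068.510649) are measured by this wrapper, so a long hold points at the decorated critical section rather than at the lock itself.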
[ 2068.726485] env[61473]: DEBUG oslo_concurrency.lockutils [None req-16c9f1d9-5356-4849-a397-5a9d24953ea0 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.215s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.728015] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 176.540s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.728297] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb] During sync_power_state the instance has a pending task (deleting). Skip. [ 2068.728581] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "c15e4e41-b5ad-4092-96ca-af2f0f1bf3bb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.738740] env[61473]: DEBUG nova.network.neutron [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Updating instance_info_cache with network_info: [{"id": "397309a1-39af-4d03-be92-a25ff37d44f9", "address": "fa:16:3e:c4:a0:64", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap397309a1-39", "ovs_interfaceid": "397309a1-39af-4d03-be92-a25ff37d44f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.751634] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "refresh_cache-bf13952e-d219-4c77-9a73-ada311eeb053" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.751941] env[61473]: DEBUG nova.compute.manager [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 
tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Instance network_info: |[{"id": "397309a1-39af-4d03-be92-a25ff37d44f9", "address": "fa:16:3e:c4:a0:64", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap397309a1-39", "ovs_interfaceid": "397309a1-39af-4d03-be92-a25ff37d44f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 2068.752380] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:a0:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '397309a1-39af-4d03-be92-a25ff37d44f9', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2068.760710] env[61473]: DEBUG oslo.service.loopingcall [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.761254] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2068.761451] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00b4e44a-c03f-4c77-af16-3da917e13c46 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.786983] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2068.786983] env[61473]: value = "task-4281724" [ 2068.786983] env[61473]: _type = "Task" [ 2068.786983] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.795206] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281724, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.297331] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281724, 'name': CreateVM_Task, 'duration_secs': 0.288891} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.297646] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2069.298112] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.298288] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.298633] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2069.298876] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57d4bf6d-7a55-4c29-9535-e4941538e188 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.302927] env[61473]: DEBUG oslo_vmware.api [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 2069.302927] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52c2f758-7e9b-b217-337a-6ebee6969c29" [ 2069.302927] env[61473]: _type = "Task" [ 2069.302927] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.309950] env[61473]: DEBUG oslo_vmware.api [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52c2f758-7e9b-b217-337a-6ebee6969c29, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.814036] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.814036] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2069.814036] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8e26e3b1-a73f-47b1-8af2-2e00271df7c2 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.283911] env[61473]: DEBUG nova.compute.manager [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Received event network-changed-397309a1-39af-4d03-be92-a25ff37d44f9 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2070.284135] env[61473]: DEBUG nova.compute.manager [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Refreshing instance network info cache due to event network-changed-397309a1-39af-4d03-be92-a25ff37d44f9. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 2070.284351] env[61473]: DEBUG oslo_concurrency.lockutils [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] Acquiring lock "refresh_cache-bf13952e-d219-4c77-9a73-ada311eeb053" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.284498] env[61473]: DEBUG oslo_concurrency.lockutils [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] Acquired lock "refresh_cache-bf13952e-d219-4c77-9a73-ada311eeb053" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2070.284664] env[61473]: DEBUG nova.network.neutron [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Refreshing network info cache for port 397309a1-39af-4d03-be92-a25ff37d44f9 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2070.557646] env[61473]: DEBUG nova.network.neutron [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Updated VIF entry in instance network info cache for port 397309a1-39af-4d03-be92-a25ff37d44f9. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2070.558023] env[61473]: DEBUG nova.network.neutron [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Updating instance_info_cache with network_info: [{"id": "397309a1-39af-4d03-be92-a25ff37d44f9", "address": "fa:16:3e:c4:a0:64", "network": {"id": "959fdf4f-0272-4dd9-95bf-abde334cca0a", "bridge": "br-int", "label": "tempest-ImagesTestJSON-740161760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "930bd6995c2a4a6d8b2f760d584e21bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap397309a1-39", "ovs_interfaceid": "397309a1-39af-4d03-be92-a25ff37d44f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2070.566988] env[61473]: DEBUG oslo_concurrency.lockutils [req-f55dbc0c-a337-4d8b-8ca1-8f4e26495720 req-2108559c-a515-45a8-9aa4-b25ab98904d7 service nova] Releasing lock "refresh_cache-bf13952e-d219-4c77-9a73-ada311eeb053" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.969982] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.966424] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.965632] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.965847] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 2083.961540] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.966178] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.966367] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.966761] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.978844] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.979110] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.979313] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.979494] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2084.980587] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ced7ce-6ae5-48fd-8289-9b2023e59996 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.989400] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20f3957-0519-4b77-a21d-742252e3333d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.004075] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15373b29-f980-432a-b507-82020a9e391f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.009801] env[61473]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e27ec7-dc05-454b-91b0-75d295fd7c6f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.037346] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180644MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2085.037477] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.037661] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.109182] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.109344] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.109472] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.109594] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.109710] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.109846] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.109967] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.110093] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.110207] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bf13952e-d219-4c77-9a73-ada311eeb053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2085.110386] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2085.110521] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=183GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2085.210653] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bab1de-18e6-4269-b3b9-5cb101b69951 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.218153] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3db00c-0e3b-4f19-9179-7ef68431aa31 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.248932] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12beb547-84ff-4324-97eb-34868fecdb40 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.255573] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a608c0b-5c0f-4caa-bf7f-e0fdf4d471ea {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.268113] env[61473]: DEBUG nova.compute.provider_tree 
[None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2085.276118] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2085.291460] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2085.291656] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.254s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.291887] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.292304] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2087.292304] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2087.312882] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313096] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313172] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313301] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313424] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313546] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313662] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313778] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.313896] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2087.314127] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2088.966094] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2095.962985] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2102.790251] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "b35a272f-6743-4e9e-8181-4e704bb3aa06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.790596] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "b35a272f-6743-4e9e-8181-4e704bb3aa06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.801425] env[61473]: DEBUG nova.compute.manager [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 2102.848231] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.848477] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.849880] env[61473]: INFO nova.compute.claims [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2103.011440] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22af60ed-9cbe-4c1f-82c6-d85c08202a75 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.018441] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12201e3b-daa2-45a1-8a57-953f2063d950 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.048512] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db3972c-4941-4030-a5b7-6d0674988325 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.054941] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d019529-5a2f-4be8-beda-dd97e9cb6502 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.067428] env[61473]: DEBUG nova.compute.provider_tree [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2103.076975] env[61473]: DEBUG nova.scheduler.client.report [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2103.091779] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 
tempest-ServersTestJSON-1714720745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.243s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.092317] env[61473]: DEBUG nova.compute.manager [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 2103.122021] env[61473]: DEBUG nova.compute.utils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2103.123446] env[61473]: DEBUG nova.compute.manager [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 2103.123637] env[61473]: DEBUG nova.network.neutron [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2103.131226] env[61473]: DEBUG nova.compute.manager [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 2103.196536] env[61473]: DEBUG nova.policy [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cc9ca85b75c4756ba365f885d45a7b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4b7d168dab4487ea3810ee49d68ee49', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 2103.202902] env[61473]: DEBUG nova.compute.manager [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 2103.227878] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2103.227878] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2103.228076] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2103.228251] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2103.228404] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2103.228549] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2103.228756] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2103.228916] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2103.229130] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 
tempest-ServersTestJSON-1714720745-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2103.229316] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2103.229492] env[61473]: DEBUG nova.virt.hardware [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2103.230374] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89403c77-31af-4298-a376-1a2d7f5d9e1f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.238391] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5ac981-78bf-42cb-82d4-c49f7eb4d3b1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.575090] env[61473]: DEBUG nova.network.neutron [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Successfully created port: c64ed449-f32c-439a-933a-33e6ad3d11e0 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2104.190393] env[61473]: DEBUG nova.compute.manager [req-90bdd548-8331-4887-a10a-198219c64c02 req-34d0adf3-957e-4063-8f8b-0f4c76fa03fc service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Received event network-vif-plugged-c64ed449-f32c-439a-933a-33e6ad3d11e0 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2104.190763] env[61473]: DEBUG oslo_concurrency.lockutils [req-90bdd548-8331-4887-a10a-198219c64c02 req-34d0adf3-957e-4063-8f8b-0f4c76fa03fc service nova] Acquiring lock "b35a272f-6743-4e9e-8181-4e704bb3aa06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.190831] env[61473]: DEBUG oslo_concurrency.lockutils [req-90bdd548-8331-4887-a10a-198219c64c02 req-34d0adf3-957e-4063-8f8b-0f4c76fa03fc service nova] Lock "b35a272f-6743-4e9e-8181-4e704bb3aa06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.190986] env[61473]: DEBUG oslo_concurrency.lockutils [req-90bdd548-8331-4887-a10a-198219c64c02 req-34d0adf3-957e-4063-8f8b-0f4c76fa03fc service nova] Lock "b35a272f-6743-4e9e-8181-4e704bb3aa06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.191157] env[61473]: DEBUG nova.compute.manager [req-90bdd548-8331-4887-a10a-198219c64c02 req-34d0adf3-957e-4063-8f8b-0f4c76fa03fc service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06]
No waiting events found dispatching network-vif-plugged-c64ed449-f32c-439a-933a-33e6ad3d11e0 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2104.191399] env[61473]: WARNING nova.compute.manager [req-90bdd548-8331-4887-a10a-198219c64c02 req-34d0adf3-957e-4063-8f8b-0f4c76fa03fc service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Received unexpected event network-vif-plugged-c64ed449-f32c-439a-933a-33e6ad3d11e0 for instance with vm_state building and task_state spawning. [ 2104.265613] env[61473]: DEBUG nova.network.neutron [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Successfully updated port: c64ed449-f32c-439a-933a-33e6ad3d11e0 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2104.275711] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "refresh_cache-b35a272f-6743-4e9e-8181-4e704bb3aa06" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.275870] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquired lock "refresh_cache-b35a272f-6743-4e9e-8181-4e704bb3aa06" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.276016] env[61473]: DEBUG nova.network.neutron [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2104.318059] env[61473]: DEBUG nova.network.neutron [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2104.744736] env[61473]: DEBUG nova.network.neutron [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Updating instance_info_cache with network_info: [{"id": "c64ed449-f32c-439a-933a-33e6ad3d11e0", "address": "fa:16:3e:9f:d7:a3", "network": {"id": "94a714c4-67d6-438b-b0fa-72d5d5c52a50", "bridge": "br-int", "label": "tempest-ServersTestJSON-622034906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4b7d168dab4487ea3810ee49d68ee49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc64ed449-f3", "ovs_interfaceid": "c64ed449-f32c-439a-933a-33e6ad3d11e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.759289] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Releasing lock "refresh_cache-b35a272f-6743-4e9e-8181-4e704bb3aa06" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2104.759421] env[61473]: DEBUG nova.compute.manager [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Instance network_info: |[{"id": "c64ed449-f32c-439a-933a-33e6ad3d11e0", "address": "fa:16:3e:9f:d7:a3", "network": {"id": "94a714c4-67d6-438b-b0fa-72d5d5c52a50", "bridge": "br-int", "label": "tempest-ServersTestJSON-622034906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4b7d168dab4487ea3810ee49d68ee49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc64ed449-f3", "ovs_interfaceid": "c64ed449-f32c-439a-933a-33e6ad3d11e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 2104.760181] env[61473]: 
DEBUG nova.virt.vmwareapi.vmops [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:d7:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c64ed449-f32c-439a-933a-33e6ad3d11e0', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2104.767752] env[61473]: DEBUG oslo.service.loopingcall [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2104.768219] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2104.768446] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3db641f-ec39-4db6-8a7f-84514ddc4d12 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.850850] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2104.850850] env[61473]: value = "task-4281725" [ 2104.850850] env[61473]: _type = "Task" [ 2104.850850] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.858923] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281725, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.360725] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281725, 'name': CreateVM_Task, 'duration_secs': 0.330901} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.362051] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2105.362051] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.362205] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.362494] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2105.362732] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51d1f57d-113f-4850-bd19-3272b0ef8c9b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.366751] env[61473]: DEBUG oslo_vmware.api [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for the task: (returnval){ [ 2105.366751] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5202c373-f5e9-0cc4-f86b-86238f10b4af" [ 2105.366751] env[61473]: _type = "Task" [ 2105.366751] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.373580] env[61473]: DEBUG oslo_vmware.api [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5202c373-f5e9-0cc4-f86b-86238f10b4af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.877197] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.877467] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2105.877679] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ec07ccb9-7111-4f58-b625-c84bb6875e7d tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.215548] env[61473]: DEBUG nova.compute.manager [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Received event network-changed-c64ed449-f32c-439a-933a-33e6ad3d11e0 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2106.215750] env[61473]: DEBUG nova.compute.manager [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Refreshing instance network info cache due to event network-changed-c64ed449-f32c-439a-933a-33e6ad3d11e0. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 2106.215964] env[61473]: DEBUG oslo_concurrency.lockutils [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] Acquiring lock "refresh_cache-b35a272f-6743-4e9e-8181-4e704bb3aa06" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.216125] env[61473]: DEBUG oslo_concurrency.lockutils [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] Acquired lock "refresh_cache-b35a272f-6743-4e9e-8181-4e704bb3aa06" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.216294] env[61473]: DEBUG nova.network.neutron [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Refreshing network info cache for port c64ed449-f32c-439a-933a-33e6ad3d11e0 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2106.467587] env[61473]: DEBUG nova.network.neutron [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Updated VIF entry in instance network info cache for port c64ed449-f32c-439a-933a-33e6ad3d11e0. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2106.467877] env[61473]: DEBUG nova.network.neutron [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Updating instance_info_cache with network_info: [{"id": "c64ed449-f32c-439a-933a-33e6ad3d11e0", "address": "fa:16:3e:9f:d7:a3", "network": {"id": "94a714c4-67d6-438b-b0fa-72d5d5c52a50", "bridge": "br-int", "label": "tempest-ServersTestJSON-622034906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4b7d168dab4487ea3810ee49d68ee49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc64ed449-f3", "ovs_interfaceid": "c64ed449-f32c-439a-933a-33e6ad3d11e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2106.476618] env[61473]: DEBUG oslo_concurrency.lockutils [req-e9e69b84-19c6-46a3-ab4b-023f720dec71 req-344c1720-2591-4e26-a938-39790a4cd86d service nova] Releasing lock "refresh_cache-b35a272f-6743-4e9e-8181-4e704bb3aa06" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.766662] env[61473]: WARNING oslo_vmware.rw_handles [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2116.766662] env[61473]: ERROR oslo_vmware.rw_handles [ 2116.767192] env[61473]: DEBUG nova.virt.vmwareapi.images [None 
req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2116.768832] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2116.769130] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Copying Virtual Disk [datastore2] vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/f72d745e-dd86-4d18-ad70-f4a1334037fc/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2116.769422] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-418248b1-9694-47bb-836a-e05c44add657 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.777781] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Waiting for the task: (returnval){ [ 2116.777781] env[61473]: value = "task-4281726" [ 2116.777781] env[61473]: _type = "Task" [ 2116.777781] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.785789] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Task: {'id': task-4281726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.289019] env[61473]: DEBUG oslo_vmware.exceptions [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2117.289432] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.290129] env[61473]: ERROR nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.290129] env[61473]: Faults: ['InvalidArgument'] [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Traceback (most recent call last): [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] yield resources [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self.driver.spawn(context, instance, image_meta, [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self._fetch_image_if_missing(context, vi) [ 2117.290129] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] image_cache(vi, tmp_image_ds_loc) [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] vm_util.copy_virtual_disk( [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] session._wait_for_task(vmdk_copy_task) [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] return self.wait_for_task(task_ref) [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] return evt.wait() [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] result = hub.switch() [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2117.290437] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] return self.greenlet.switch() [ 2117.290952] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2117.290952] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self.f(*self.args, **self.kw) [ 2117.290952] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2117.290952] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] raise exceptions.translate_fault(task_info.error) [ 2117.290952] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.290952] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Faults: ['InvalidArgument'] [ 2117.290952] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] [ 2117.290952] env[61473]: INFO nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Terminating instance [ 2117.292272] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.292487] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.293136] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 
tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2117.293331] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2117.293553] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d92e37f0-177c-45d4-bba3-cd89252cbc4a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.296194] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825e92b7-5fa4-4aba-b1fd-8485a1017713 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.303074] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2117.303295] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c42092d-3147-4ea7-843e-c1234218287f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.305638] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.305806] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2117.306852] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e82ede3-259c-481e-9d1f-8d12be907032 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.312014] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Waiting for the task: (returnval){ [ 2117.312014] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]528672d8-72ea-2439-e7c2-d7e1ebddefe5" [ 2117.312014] env[61473]: _type = "Task" [ 2117.312014] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.318986] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]528672d8-72ea-2439-e7c2-d7e1ebddefe5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.366233] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2117.366486] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2117.366676] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Deleting the datastore file [datastore2] cbbd16ce-8cea-4d08-b672-99da04f148e4 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2117.366976] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f51d5dfa-a4a7-427f-86ad-f57834689a90 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.373243] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Waiting for the task: (returnval){ [ 2117.373243] env[61473]: value = "task-4281728" [ 2117.373243] env[61473]: _type = "Task" [ 2117.373243] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.381043] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Task: {'id': task-4281728, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.828285] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2117.828598] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Creating directory with path [datastore2] vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.829367] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd041626-c3c6-4d14-826f-c072f77babc8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.839959] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Created directory with path [datastore2] vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.840403] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Fetch image to [datastore2] vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2117.840403] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2117.841067] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed87ce69-2647-44c4-a24c-f80af557edf2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.847805] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f827f85-c0f4-47b7-8dc0-836909886144 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.856472] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5761c038-8ff9-4503-b4c7-97ddaa9559e9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.890128] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2584f4a4-a49d-452e-a213-fa94c299d1e7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.896874] env[61473]: DEBUG oslo_vmware.api [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Task: {'id': task-4281728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072823} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.898243] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2117.898433] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2117.898605] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2117.898790] env[61473]: INFO nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Took 0.61 seconds to destroy the instance on the hypervisor. 
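[editor's note] The DeleteDatastoreFile_Task records just above, like the CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task records earlier in this trace, all follow one oslo.vmware pattern: invoke a vCenter method that returns a Task moref, then block on wait_for_task(), which polls the task (the "progress is 0%" lines) until it reports success or raises a translated fault. A minimal sketch of that pattern, assuming only the public oslo.vmware API; the endpoint, credentials, datastore path and the Datacenter lookup below are illustrative placeholders, not this deployment's values or Nova's own code:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder vCenter endpoint and credentials.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Minimal stand-in for Nova's datastore utilities: take the first
    # Datacenter moref the property collector returns (real code would
    # page through and cancel the retrieval).
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'Datacenter', 100)
    dc_ref = retrieve_result.objects[0].obj

    # FileManager.DeleteDatastoreFile_Task returns a Task moref
    # immediately; wait_for_task() polls task_info and raises a
    # translated fault (e.g. VimFaultException) on the error state.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] some-instance-dir',  # placeholder path
        datacenter=dc_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success' once the deletion completes

The error path is exactly how the CopyVirtualDisk_Task failure above surfaces: the task ends in error, _poll_task raises the translated VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), and the compute manager aborts the build.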
[ 2117.900651] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b06e7265-ae68-4a23-b1d0-5bad5dfd1f64 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.902495] env[61473]: DEBUG nova.compute.claims [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2117.902669] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.902893] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.923589] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2117.990756] env[61473]: DEBUG oslo_vmware.rw_handles [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2118.052158] env[61473]: DEBUG oslo_vmware.rw_handles [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2118.052451] env[61473]: DEBUG oslo_vmware.rw_handles [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2118.161793] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61afc16-62a0-4ef2-9255-80ac4fd32564 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.169650] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c301251-8e69-4ee0-a925-4e8433d5ddd7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.198916] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fc5f15-07ff-4809-8ddd-b5bea9c66dbd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.206321] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffefc88-4d39-43c9-9b75-e060436c681b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.222479] env[61473]: DEBUG nova.compute.provider_tree [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2118.231949] env[61473]: DEBUG nova.scheduler.client.report [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2118.247971] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.345s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.248538] env[61473]: ERROR nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.248538] env[61473]: Faults: ['InvalidArgument'] [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Traceback (most recent call last): [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2118.248538] 
env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self.driver.spawn(context, instance, image_meta, [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self._fetch_image_if_missing(context, vi) [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] image_cache(vi, tmp_image_ds_loc) [ 2118.248538] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] vm_util.copy_virtual_disk( [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] session._wait_for_task(vmdk_copy_task) [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] return self.wait_for_task(task_ref) [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] return evt.wait() [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] result = hub.switch() [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] return self.greenlet.switch() [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2118.249459] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] self.f(*self.args, **self.kw) [ 2118.249929] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2118.249929] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] raise exceptions.translate_fault(task_info.error) [ 2118.249929] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.249929] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Faults: ['InvalidArgument'] [ 2118.249929] env[61473]: ERROR nova.compute.manager [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] [ 2118.249929] env[61473]: DEBUG nova.compute.utils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2118.251305] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Build of instance cbbd16ce-8cea-4d08-b672-99da04f148e4 was re-scheduled: A specified parameter was not correct: fileType [ 2118.251305] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2118.251709] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2118.251884] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2118.252073] env[61473]: DEBUG nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2118.252245] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2118.680170] env[61473]: DEBUG nova.network.neutron [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.696030] env[61473]: INFO nova.compute.manager [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Took 0.44 seconds to deallocate network for instance. [ 2118.808547] env[61473]: INFO nova.scheduler.client.report [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Deleted allocations for instance cbbd16ce-8cea-4d08-b672-99da04f148e4 [ 2118.833766] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2d33ca49-b570-4fbc-bd55-f4a796fca007 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 637.257s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.834079] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 440.520s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.834260] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Acquiring lock "cbbd16ce-8cea-4d08-b672-99da04f148e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.834470] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.834639] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.836904] env[61473]: INFO nova.compute.manager [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Terminating instance [ 2118.839039] env[61473]: DEBUG nova.compute.manager [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2118.839127] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2118.839990] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c57a964-00c2-4c17-b8fd-966b8138651f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.849334] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21f04d1-23d6-4c69-9e60-bd2a1f8ff73a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.878493] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cbbd16ce-8cea-4d08-b672-99da04f148e4 could not be found. [ 2118.878727] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2118.878877] env[61473]: INFO nova.compute.manager [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2118.879197] env[61473]: DEBUG oslo.service.loopingcall [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2118.879436] env[61473]: DEBUG nova.compute.manager [-] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2118.879529] env[61473]: DEBUG nova.network.neutron [-] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2118.902933] env[61473]: DEBUG nova.network.neutron [-] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.911164] env[61473]: INFO nova.compute.manager [-] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] Took 0.03 seconds to deallocate network for instance. [ 2119.039396] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2f97d39d-31b9-4169-af97-2e8650d84c64 tempest-ServersNegativeTestJSON-2082386120 tempest-ServersNegativeTestJSON-2082386120-project-member] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.205s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.040149] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 226.852s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.040355] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: cbbd16ce-8cea-4d08-b672-99da04f148e4] During sync_power_state the instance has a pending task (deleting). Skip.
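A side note on the lock holders named in these lockutils records: oslo.concurrency logs the __qualname__ of the function that took the lock, and a function defined inside another function carries a <locals> component in that name, which is why holders such as build_and_run_instance.<locals>._locked_do_build_and_run_instance and _sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync appear throughout. A short demonstration of where the name comes from:

    def build_and_run_instance():
        # The nested function's qualified name records its enclosing scope.
        def _locked_do_build_and_run_instance():
            pass
        return _locked_do_build_and_run_instance

    print(build_and_run_instance().__qualname__)
    # build_and_run_instance.<locals>._locked_do_build_and_run_instance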
[ 2119.040538] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "cbbd16ce-8cea-4d08-b672-99da04f148e4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.714462] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquiring lock "8a17da2e-1070-43a1-bc00-22d9b04dd806" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.714768] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Lock "8a17da2e-1070-43a1-bc00-22d9b04dd806" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.727055] env[61473]: DEBUG nova.compute.manager [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 2122.776709] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.777058] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.778993] env[61473]: INFO nova.compute.claims [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2122.954073] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7820fc3b-8d02-4ab8-8786-5e8444be86f8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.962607] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e0c1ea-0059-4e9c-a504-0f7e15606578 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.993777] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396da5ce-c625-4407-87a3-97a8ca6fa0e1 {{(pid=61473)
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.001033] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d2d71c-8cd0-49b7-b5d9-d3bce60ae794 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.014236] env[61473]: DEBUG nova.compute.provider_tree [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2123.023707] env[61473]: DEBUG nova.scheduler.client.report [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2123.044568] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.267s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.044568] env[61473]: DEBUG nova.compute.manager [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 2123.079095] env[61473]: DEBUG nova.compute.utils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2123.080495] env[61473]: DEBUG nova.compute.manager [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Allocating IP information in the background. 
{{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 2123.080668] env[61473]: DEBUG nova.network.neutron [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2123.090269] env[61473]: DEBUG nova.compute.manager [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 2123.146425] env[61473]: DEBUG nova.policy [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '849562dd72cc4e388fd6b0adecff17b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '059e85993b13412180851c4f97997177', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 2123.165556] env[61473]: DEBUG nova.compute.manager [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 2123.191545] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2123.191787] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2123.191948] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2123.192145] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2123.192294] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2123.194025] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2123.194025] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2123.194025] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2123.194025] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 
tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2123.194025] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2123.194234] env[61473]: DEBUG nova.virt.hardware [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2123.194234] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a9b1a2-d23d-449e-b80a-55418a7784fc {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.202430] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0423c53d-4cbd-4d06-aee2-9c7bdcf0e0cd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.652623] env[61473]: DEBUG nova.network.neutron [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Successfully created port: 4ecef31c-4c23-49b7-a165-59ac5cdef545 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2124.311087] env[61473]: DEBUG nova.compute.manager [req-6b65f239-56bb-445c-845e-8f67855c5d9b req-b3f89908-b453-46e3-b09e-931ae6877bb0 service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Received event network-vif-plugged-4ecef31c-4c23-49b7-a165-59ac5cdef545 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2124.311331] env[61473]: DEBUG oslo_concurrency.lockutils [req-6b65f239-56bb-445c-845e-8f67855c5d9b req-b3f89908-b453-46e3-b09e-931ae6877bb0 service nova] Acquiring lock "8a17da2e-1070-43a1-bc00-22d9b04dd806-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.312079] env[61473]: DEBUG oslo_concurrency.lockutils [req-6b65f239-56bb-445c-845e-8f67855c5d9b req-b3f89908-b453-46e3-b09e-931ae6877bb0 service nova] Lock "8a17da2e-1070-43a1-bc00-22d9b04dd806-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.312332] env[61473]: DEBUG oslo_concurrency.lockutils [req-6b65f239-56bb-445c-845e-8f67855c5d9b req-b3f89908-b453-46e3-b09e-931ae6877bb0 service nova] Lock "8a17da2e-1070-43a1-bc00-22d9b04dd806-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.312552] env[61473]: DEBUG nova.compute.manager [req-6b65f239-56bb-445c-845e-8f67855c5d9b req-b3f89908-b453-46e3-b09e-931ae6877bb0
service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] No waiting events found dispatching network-vif-plugged-4ecef31c-4c23-49b7-a165-59ac5cdef545 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2124.312756] env[61473]: WARNING nova.compute.manager [req-6b65f239-56bb-445c-845e-8f67855c5d9b req-b3f89908-b453-46e3-b09e-931ae6877bb0 service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Received unexpected event network-vif-plugged-4ecef31c-4c23-49b7-a165-59ac5cdef545 for instance with vm_state building and task_state spawning. [ 2124.402023] env[61473]: DEBUG nova.network.neutron [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Successfully updated port: 4ecef31c-4c23-49b7-a165-59ac5cdef545 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2124.413412] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquiring lock "refresh_cache-8a17da2e-1070-43a1-bc00-22d9b04dd806" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.413557] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquired lock "refresh_cache-8a17da2e-1070-43a1-bc00-22d9b04dd806" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.413706] env[61473]: DEBUG nova.network.neutron [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2124.456570] env[61473]: DEBUG nova.network.neutron [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2124.616346] env[61473]: DEBUG nova.network.neutron [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Updating instance_info_cache with network_info: [{"id": "4ecef31c-4c23-49b7-a165-59ac5cdef545", "address": "fa:16:3e:05:c6:1d", "network": {"id": "6e037293-5977-4702-b871-4e0cc482d642", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2096946511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "059e85993b13412180851c4f97997177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ecef31c-4c", "ovs_interfaceid": "4ecef31c-4c23-49b7-a165-59ac5cdef545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2124.630890] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Releasing lock "refresh_cache-8a17da2e-1070-43a1-bc00-22d9b04dd806" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2124.631204] env[61473]: DEBUG nova.compute.manager [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Instance network_info: |[{"id": "4ecef31c-4c23-49b7-a165-59ac5cdef545", "address": "fa:16:3e:05:c6:1d", "network": {"id": "6e037293-5977-4702-b871-4e0cc482d642", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2096946511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "059e85993b13412180851c4f97997177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ecef31c-4c", "ovs_interfaceid": "4ecef31c-4c23-49b7-a165-59ac5cdef545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 
2124.631670] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:c6:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ecef31c-4c23-49b7-a165-59ac5cdef545', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2124.639008] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Creating folder: Project (059e85993b13412180851c4f97997177). Parent ref: group-v843485. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2124.639529] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-679e5aa4-ec6c-46a0-8a5b-59c8df3e5461 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.651080] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Created folder: Project (059e85993b13412180851c4f97997177) in parent group-v843485. [ 2124.651289] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Creating folder: Instances. Parent ref: group-v843594. {{(pid=61473) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2124.651515] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-080a2779-d1b4-4dd7-b971-887a2cd4795f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.660144] env[61473]: INFO nova.virt.vmwareapi.vm_util [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Created folder: Instances in parent group-v843594. [ 2124.660367] env[61473]: DEBUG oslo.service.loopingcall [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2124.660540] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2124.660729] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cba7ebed-d44d-43e1-8b60-eede58a1ad1c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.678802] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2124.678802] env[61473]: value = "task-4281731" [ 2124.678802] env[61473]: _type = "Task" [ 2124.678802] env[61473]: } to complete. 
{{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.685944] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281731, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.188203] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281731, 'name': CreateVM_Task, 'duration_secs': 0.284287} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.188408] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2125.189061] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.189262] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.189545] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2125.189779] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-360ad593-c1f8-4291-928d-0e5264dc801b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.193998] env[61473]: DEBUG oslo_vmware.api [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Waiting for the task: (returnval){ [ 2125.193998] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]520732ba-7121-3284-3ab2-819d9885a29b" [ 2125.193998] env[61473]: _type = "Task" [ 2125.193998] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.200862] env[61473]: DEBUG oslo_vmware.api [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]520732ba-7121-3284-3ab2-819d9885a29b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.704037] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2125.704355] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2125.704580] env[61473]: DEBUG oslo_concurrency.lockutils [None req-2c8a6cee-39b6-4177-8e81-bdee40df3201 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.337833] env[61473]: DEBUG nova.compute.manager [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Received event network-changed-4ecef31c-4c23-49b7-a165-59ac5cdef545 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2126.338061] env[61473]: DEBUG nova.compute.manager [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Refreshing instance network info cache due to event network-changed-4ecef31c-4c23-49b7-a165-59ac5cdef545. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 2126.338280] env[61473]: DEBUG oslo_concurrency.lockutils [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] Acquiring lock "refresh_cache-8a17da2e-1070-43a1-bc00-22d9b04dd806" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.338426] env[61473]: DEBUG oslo_concurrency.lockutils [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] Acquired lock "refresh_cache-8a17da2e-1070-43a1-bc00-22d9b04dd806" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.338588] env[61473]: DEBUG nova.network.neutron [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Refreshing network info cache for port 4ecef31c-4c23-49b7-a165-59ac5cdef545 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2126.585043] env[61473]: DEBUG nova.network.neutron [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Updated VIF entry in instance network info cache for port 4ecef31c-4c23-49b7-a165-59ac5cdef545. 
{{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2126.585416] env[61473]: DEBUG nova.network.neutron [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Updating instance_info_cache with network_info: [{"id": "4ecef31c-4c23-49b7-a165-59ac5cdef545", "address": "fa:16:3e:05:c6:1d", "network": {"id": "6e037293-5977-4702-b871-4e0cc482d642", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2096946511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "059e85993b13412180851c4f97997177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ecef31c-4c", "ovs_interfaceid": "4ecef31c-4c23-49b7-a165-59ac5cdef545", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.594671] env[61473]: DEBUG oslo_concurrency.lockutils [req-07fff8b3-495e-49c2-9226-152f4371c0d5 req-4ef6ba33-a60a-49e9-9796-6d4a1f2b088e service nova] Releasing lock "refresh_cache-8a17da2e-1070-43a1-bc00-22d9b04dd806" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2139.966377] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.966748] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.974205] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.965966] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.966376] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 2145.962327] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.965910] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.966096] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.966879] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.979253] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.979481] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.979647] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.979823] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2146.980984] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8b78f7-f064-4ce9-a06d-e5c4a0daded6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.989855] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d94a45-ebd8-414e-9788-e07b672de934 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.003414] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66549804-a8e9-4c58-874b-a74664589d30 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.009571] env[61473]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2019385b-be1b-40f3-b6f9-8e8a4a53bd05 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.037697] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180660MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2147.037856] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.038044] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2147.180527] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 19154895-863b-4468-8737-32105f98528b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.180690] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.180824] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.180949] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.181090] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.181215] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.181330] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.181448] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bf13952e-d219-4c77-9a73-ada311eeb053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.181562] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b35a272f-6743-4e9e-8181-4e704bb3aa06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.181678] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8a17da2e-1070-43a1-bc00-22d9b04dd806 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2147.181889] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2147.182028] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2147.199612] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing inventories for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2147.212203] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating ProviderTree inventory for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2147.212389] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2147.222399] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing aggregate associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, aggregates: None {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2147.239256] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing trait associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2147.357222] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffbb7cc-dcfb-4c36-b595-ce9b413f07ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.364808] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7903d99a-08ab-4ada-8a11-12f536bce67d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.393658] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06040631-d72a-41d9-90c7-50b64a6fd2f1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.401229] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3439884c-e97c-42da-ad0f-20338ee12f98 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.413859] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2147.422783] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2147.438967] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2147.438967] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.401s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.438945] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2149.439263] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2149.439263] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2149.460656] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.460820] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.460919] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461057] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461208] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461345] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461468] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461588] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461707] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461823] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2149.461953] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2149.966307] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2153.966632] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2153.967035] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances with incomplete migration {{(pid=61473) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11293}} [ 2158.284241] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "f9a8791e-2e0e-40a0-9562-12c8545b900a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.284536] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "f9a8791e-2e0e-40a0-9562-12c8545b900a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.976032] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2162.976032] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11255}} [ 2162.986092] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] There are 0 instances to clean {{(pid=61473) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11264}} [ 2167.626314] env[61473]: WARNING oslo_vmware.rw_handles [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2167.626314] env[61473]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2167.626314] env[61473]: ERROR oslo_vmware.rw_handles [ 2167.627469] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2167.628912] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2167.629186] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Copying Virtual Disk [datastore2] vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/d977703e-cba6-4b0c-8c6b-e774f7527934/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2167.629487] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3d34df2-58ec-48bd-8d7e-420ffb02476d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.637112] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Waiting for the task: (returnval){ [ 2167.637112] env[61473]: value = "task-4281732" [ 2167.637112] env[61473]: _type = "Task" [ 2167.637112] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.645691] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Task: {'id': task-4281732, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.148780] env[61473]: DEBUG oslo_vmware.exceptions [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Fault InvalidArgument not matched. 
{{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2168.148780] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.149429] env[61473]: ERROR nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.149429] env[61473]: Faults: ['InvalidArgument'] [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] Traceback (most recent call last): [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] yield resources [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self.driver.spawn(context, instance, image_meta, [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self._fetch_image_if_missing(context, vi) [ 2168.149429] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] image_cache(vi, tmp_image_ds_loc) [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] vm_util.copy_virtual_disk( [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] session._wait_for_task(vmdk_copy_task) [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] return self.wait_for_task(task_ref) [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] return evt.wait() [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] result = hub.switch() [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2168.149808] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] return self.greenlet.switch() [ 2168.150135] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2168.150135] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self.f(*self.args, **self.kw) [ 2168.150135] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2168.150135] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] raise exceptions.translate_fault(task_info.error) [ 2168.150135] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.150135] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] Faults: ['InvalidArgument'] [ 2168.150135] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] [ 2168.150135] env[61473]: INFO nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Terminating instance [ 2168.151413] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2168.151627] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2168.151874] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-628a6187-e2f6-4a9f-9aae-6b8857175a29 {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.154426] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2168.154621] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2168.156056] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb18d774-75d8-46b1-8240-df37814ee24e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.162334] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2168.162567] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c6d51ae-5198-46ad-a5e7-16ebbb27f3fa {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.165749] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2168.166189] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2168.167044] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54a37f42-fdd6-4d9a-9966-e686950ae8e3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.172302] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 2168.172302] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]5242f707-f584-679f-472b-bb1c82910074" [ 2168.172302] env[61473]: _type = "Task" [ 2168.172302] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.180090] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]5242f707-f584-679f-472b-bb1c82910074, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.238073] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2168.238317] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2168.238499] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Deleting the datastore file [datastore2] 19154895-863b-4468-8737-32105f98528b {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2168.238764] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e7b5f1a-5b1d-4fae-9cb6-f9c647ef905c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.245216] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Waiting for the task: (returnval){ [ 2168.245216] env[61473]: value = "task-4281734" [ 2168.245216] env[61473]: _type = "Task" [ 2168.245216] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.252835] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Task: {'id': task-4281734, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.682691] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2168.683048] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating directory with path [datastore2] vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2168.683224] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dd45b64-985e-4aa5-9f04-d8a5c8171c44 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.694689] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Created directory with path [datastore2] vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2168.694884] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Fetch image to [datastore2] vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2168.695070] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2168.695842] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd66479-bb33-4c18-a956-85cc857ae145 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.703376] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3614f96-f354-42a6-ab1d-4767a6e33ac3 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.712567] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437ddafc-13df-4b6a-b863-0c8013385108 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.743958] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f882222-6e12-4954-af2f-73a7a7a71a44 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.755128] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-13135c14-10d4-4656-97e3-675e7a020d6c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.756801] env[61473]: DEBUG oslo_vmware.api [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Task: {'id': task-4281734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065728} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.757056] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2168.757242] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2168.757414] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2168.757585] env[61473]: INFO nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Took 0.60 seconds to destroy the instance on the hypervisor. 
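The task-polling pattern visible throughout this trace (Waiting for the task ... progress is 0% ... completed successfully with a reported duration_secs) is a plain poll-until-terminal loop. The sketch below is a minimal illustration of that pattern, assuming a hypothetical get_task_info callable that exposes state, progress and error fields; it is not the oslo.vmware implementation.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative only

    def wait_for_task(get_task_info, timeout=60.0):
        # Poll a vCenter-style task until it reaches a terminal state.
        # get_task_info is a hypothetical callable returning an object with
        # .state ('running' | 'success' | 'error'), .progress and .error.
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info.state == 'success':
                return time.monotonic() - start  # analogous to duration_secs
            if info.state == 'error':
                # e.g. the VimFaultException ('InvalidArgument') raised above
                raise RuntimeError(info.error)
            if time.monotonic() - start > timeout:
                raise TimeoutError('task did not reach a terminal state')
            time.sleep(POLL_INTERVAL)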
[ 2168.759692] env[61473]: DEBUG nova.compute.claims [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2168.759900] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.760137] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.782838] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2168.836094] env[61473]: DEBUG oslo_vmware.rw_handles [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2168.896411] env[61473]: DEBUG oslo_vmware.rw_handles [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2168.896599] env[61473]: DEBUG oslo_vmware.rw_handles [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2168.992107] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd72ddc3-5598-4096-b82d-22d8c9a4cc27 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.999693] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2effd58-e5bc-4ad8-9958-94955f1a83ca {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.028600] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7fa84d-3b12-46b9-96b6-3c039f039fc4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.035431] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dd9bbc-1b56-424b-a65e-3885c02fc3bd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.049372] env[61473]: DEBUG nova.compute.provider_tree [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2169.057515] env[61473]: DEBUG nova.scheduler.client.report [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2169.073856] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.314s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.074416] env[61473]: ERROR nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2169.074416] env[61473]: Faults: ['InvalidArgument'] [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] Traceback (most recent call last): [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in 
_build_and_run_instance [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self.driver.spawn(context, instance, image_meta, [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self._fetch_image_if_missing(context, vi) [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] image_cache(vi, tmp_image_ds_loc) [ 2169.074416] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] vm_util.copy_virtual_disk( [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] session._wait_for_task(vmdk_copy_task) [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] return self.wait_for_task(task_ref) [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] return evt.wait() [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] result = hub.switch() [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] return self.greenlet.switch() [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2169.074740] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] self.f(*self.args, **self.kw) [ 2169.075084] env[61473]: ERROR nova.compute.manager [instance: 
19154895-863b-4468-8737-32105f98528b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2169.075084] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] raise exceptions.translate_fault(task_info.error) [ 2169.075084] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2169.075084] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] Faults: ['InvalidArgument'] [ 2169.075084] env[61473]: ERROR nova.compute.manager [instance: 19154895-863b-4468-8737-32105f98528b] [ 2169.075218] env[61473]: DEBUG nova.compute.utils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2169.076888] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Build of instance 19154895-863b-4468-8737-32105f98528b was re-scheduled: A specified parameter was not correct: fileType [ 2169.076888] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2169.077290] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2169.077467] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2169.077638] env[61473]: DEBUG nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2169.077800] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2169.738950] env[61473]: DEBUG nova.network.neutron [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.750241] env[61473]: INFO nova.compute.manager [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Took 0.67 seconds to deallocate network for instance. [ 2169.843633] env[61473]: INFO nova.scheduler.client.report [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Deleted allocations for instance 19154895-863b-4468-8737-32105f98528b [ 2169.865684] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a28890cb-74e0-470a-a8e1-7f7363b9727f tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "19154895-863b-4468-8737-32105f98528b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.344s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.866551] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "19154895-863b-4468-8737-32105f98528b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.084s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.867030] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Acquiring lock "19154895-863b-4468-8737-32105f98528b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.867030] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "19154895-863b-4468-8737-32105f98528b-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.867196] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "19154895-863b-4468-8737-32105f98528b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.869765] env[61473]: INFO nova.compute.manager [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Terminating instance [ 2169.871375] env[61473]: DEBUG nova.compute.manager [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2169.871546] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2169.872314] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-913f6aa7-9e25-4d90-ba86-852337828066 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.882269] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517622e1-fdde-4d2f-a0e9-aecf05164b8d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.894342] env[61473]: DEBUG nova.compute.manager [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Starting instance... {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 2169.914585] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 19154895-863b-4468-8737-32105f98528b could not be found. 
[ 2169.914797] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2169.914976] env[61473]: INFO nova.compute.manager [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] [instance: 19154895-863b-4468-8737-32105f98528b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2169.915236] env[61473]: DEBUG oslo.service.loopingcall [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2169.915464] env[61473]: DEBUG nova.compute.manager [-] [instance: 19154895-863b-4468-8737-32105f98528b] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2169.915561] env[61473]: DEBUG nova.network.neutron [-] [instance: 19154895-863b-4468-8737-32105f98528b] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2169.940141] env[61473]: DEBUG nova.network.neutron [-] [instance: 19154895-863b-4468-8737-32105f98528b] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.948550] env[61473]: INFO nova.compute.manager [-] [instance: 19154895-863b-4468-8737-32105f98528b] Took 0.03 seconds to deallocate network for instance. 
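The record above ("Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return") reflects the looping-call retry machinery: the deallocation is re-invoked on an interval until the wrapped function signals completion. A rough sketch of that contract, using a local LoopingCallDone sentinel as a stand-in for the one in oslo.service; the interval and attempt limit are illustrative:

    import time

    class LoopingCallDone(Exception):
        # Stand-in sentinel: the wrapped function raises this to stop the loop.
        pass

    def run_with_retries(func, interval=1.0, max_attempts=5):
        # Re-invoke func on a fixed interval until it raises LoopingCallDone.
        for attempt in range(1, max_attempts + 1):
            try:
                func(attempt)
            except LoopingCallDone:
                return  # the wrapped deallocation reported success
            time.sleep(interval)
        raise RuntimeError('gave up after %d attempts' % max_attempts)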
[ 2169.949674] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.949900] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.951503] env[61473]: INFO nova.compute.claims [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2170.054248] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ab5f80af-4707-4c7e-8d68-9dcf1515b055 tempest-ListServersNegativeTestJSON-1106955563 tempest-ListServersNegativeTestJSON-1106955563-project-member] Lock "19154895-863b-4468-8737-32105f98528b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.055101] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "19154895-863b-4468-8737-32105f98528b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 277.867s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.055296] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 19154895-863b-4468-8737-32105f98528b] During sync_power_state the instance has a pending task (deleting). Skip. 
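The inventory dict repeated throughout this trace is what sizes provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 for scheduling: for each resource class the schedulable capacity works out to (total - reserved) * allocation_ratio, which is why 10 allocated vCPUs sit comfortably inside a 48-vCPU host at a 4.0 ratio. A quick check with the exact figures from the log:

    # Figures copied from the provider inventory records in this trace.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 329, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 329.0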
[ 2170.055472] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "19154895-863b-4468-8737-32105f98528b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.130267] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba78b9fc-18e5-4c85-a9bd-93646872fc65 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.139234] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bd066a-a2a4-43d3-b5d5-e806ca0151b0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.168345] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9291ac4-9d5e-4d91-87c4-0815452b74a4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.175631] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45bf879-673f-4bf9-ba28-0777d252b3c9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.188658] env[61473]: DEBUG nova.compute.provider_tree [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2170.196691] env[61473]: DEBUG nova.scheduler.client.report [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2170.212882] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.213401] env[61473]: DEBUG nova.compute.manager [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Start building networks asynchronously for instance. 
{{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 2170.246291] env[61473]: DEBUG nova.compute.utils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2170.247647] env[61473]: DEBUG nova.compute.manager [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 2170.247833] env[61473]: DEBUG nova.network.neutron [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2170.256268] env[61473]: DEBUG nova.compute.manager [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 2170.318218] env[61473]: DEBUG nova.compute.manager [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 2170.326994] env[61473]: DEBUG nova.policy [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa72838d6ec74c2ebac9d403f5ac1cf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b5fd5d032e047b8b77b2b727a03f01c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 2170.343899] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2170.343899] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2170.343899] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2170.344141] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2170.344315] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2170.344475] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2170.344783] env[61473]: DEBUG nova.virt.hardware [None 
req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2170.344956] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2170.345168] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2170.345350] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2170.345529] env[61473]: DEBUG nova.virt.hardware [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2170.346407] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79239b2c-0608-4f02-ae24-9d43d399bf27 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.354909] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5138f5-62c6-4e46-ab0a-b4c6f8127b09 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.705075] env[61473]: DEBUG nova.network.neutron [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Successfully created port: c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2171.404333] env[61473]: DEBUG nova.network.neutron [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Successfully updated port: c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2171.414259] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "refresh_cache-f9a8791e-2e0e-40a0-9562-12c8545b900a" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2171.414400] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 
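
The topology search above substitutes the 65536 default for the unset (0) flavor and image limits, then keeps every sockets/cores/threads factorization of the vCPU count; for the single-vCPU m1.nano flavor that leaves exactly one candidate, 1:1:1. A toy re-derivation (not Nova's actual implementation):

    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count, within the given limits; mirrors the counts logged above.
    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        limit = lambda m: range(1, min(vcpus, m) + 1)
        return [(s, c, t)
                for s, c, t in itertools.product(limit(max_sockets),
                                                 limit(max_cores),
                                                 limit(max_threads))
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [(1, 1, 1)] -- "Got 1 possible topologies"
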
tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "refresh_cache-f9a8791e-2e0e-40a0-9562-12c8545b900a" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2171.414551] env[61473]: DEBUG nova.network.neutron [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2171.456747] env[61473]: DEBUG nova.network.neutron [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2171.689769] env[61473]: DEBUG nova.network.neutron [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Updating instance_info_cache with network_info: [{"id": "c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4", "address": "fa:16:3e:70:03:6b", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3ae15ac-ea", "ovs_interfaceid": "c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.702137] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "refresh_cache-f9a8791e-2e0e-40a0-9562-12c8545b900a" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.702449] env[61473]: DEBUG nova.compute.manager [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Instance network_info: |[{"id": "c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4", "address": "fa:16:3e:70:03:6b", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3ae15ac-ea", "ovs_interfaceid": "c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1992}} [ 2171.702879] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:03:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2171.710597] env[61473]: DEBUG oslo.service.loopingcall [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2171.711091] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2171.711322] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4269135d-3859-45e5-8f99-ad1c64c0f779 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.731988] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2171.731988] env[61473]: value = "task-4281735" [ 2171.731988] env[61473]: _type = "Task" [ 2171.731988] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.743505] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281735, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.776467] env[61473]: DEBUG nova.compute.manager [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Received event network-vif-plugged-c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2171.776604] env[61473]: DEBUG oslo_concurrency.lockutils [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] Acquiring lock "f9a8791e-2e0e-40a0-9562-12c8545b900a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2171.776908] env[61473]: DEBUG oslo_concurrency.lockutils [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] Lock "f9a8791e-2e0e-40a0-9562-12c8545b900a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.776977] env[61473]: DEBUG oslo_concurrency.lockutils [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] Lock "f9a8791e-2e0e-40a0-9562-12c8545b900a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.777154] env[61473]: DEBUG nova.compute.manager [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] No waiting events found dispatching network-vif-plugged-c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2171.777321] env[61473]: WARNING nova.compute.manager [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Received unexpected event network-vif-plugged-c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4 for instance with vm_state building and task_state spawning. [ 2171.777481] env[61473]: DEBUG nova.compute.manager [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Received event network-changed-c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2171.777635] env[61473]: DEBUG nova.compute.manager [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Refreshing instance network info cache due to event network-changed-c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4. 
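
The event handling above is a register-then-pop handshake: code that expects a Neutron callback registers a waiter for "network-vif-plugged-<port>" before triggering it, and the external-event RPC pops that waiter under the per-instance "-events" lock. Here the instance is still spawning and nothing is registered yet, hence the "Received unexpected event" warning. An illustrative sketch with hypothetical names:

    # Hypothetical sketch of the pop_instance_event() flow: hand an incoming
    # event to a pre-registered waiter, or report it as unexpected.
    import threading

    _waiters = {}                    # event name -> threading.Event
    _lock = threading.Lock()         # stands in for the "<uuid>-events" lock

    def prepare_for_event(name):
        with _lock:
            _waiters[name] = threading.Event()
            return _waiters[name]

    def pop_instance_event(name):
        with _lock:
            waiter = _waiters.pop(name, None)
        if waiter is None:
            return False             # -> "Received unexpected event ..."
        waiter.set()                 # wake whoever is blocked on wait()
        return True
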
{{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}} [ 2171.777812] env[61473]: DEBUG oslo_concurrency.lockutils [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] Acquiring lock "refresh_cache-f9a8791e-2e0e-40a0-9562-12c8545b900a" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2171.777948] env[61473]: DEBUG oslo_concurrency.lockutils [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] Acquired lock "refresh_cache-f9a8791e-2e0e-40a0-9562-12c8545b900a" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2171.778278] env[61473]: DEBUG nova.network.neutron [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Refreshing network info cache for port c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2172.115265] env[61473]: DEBUG nova.network.neutron [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Updated VIF entry in instance network info cache for port c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2172.115624] env[61473]: DEBUG nova.network.neutron [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Updating instance_info_cache with network_info: [{"id": "c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4", "address": "fa:16:3e:70:03:6b", "network": {"id": "a2d362b8-2bcd-49ad-ad95-32fdb0f2bf2d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-807420673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5fd5d032e047b8b77b2b727a03f01c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3ae15ac-ea", "ovs_interfaceid": "c3ae15ac-eaa9-4172-b5c9-b08f52ac6ef4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.125278] env[61473]: DEBUG oslo_concurrency.lockutils [req-650e9eb7-a3f0-4060-a291-77aac91a3795 req-40807a3e-d7a0-4370-b751-32eb6f1f883c service nova] Releasing lock "refresh_cache-f9a8791e-2e0e-40a0-9562-12c8545b900a" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2172.242321] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281735, 'name': CreateVM_Task, 'duration_secs': 0.297128} completed successfully. 
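
CreateVM_Task above follows the usual vCenter pattern: the SOAP call returns a task reference immediately, and the client polls it (the "progress is 0%" lines) until it reports success or error; here it completed after 0.297s. A minimal sketch of that loop, where fetch_task_info is a hypothetical stand-in for the PropertyCollector round-trip oslo_vmware performs on each tick:

    # Poll a vCenter-style task until it finishes; illustrative only.
    import time

    def wait_for_task(fetch_task_info, interval=0.5):
        while True:
            info = fetch_task_info()   # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)       # oslo_vmware uses a looping call instead
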
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.242533] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2172.243109] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.243288] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2172.243617] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2172.243866] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bee338a2-5479-4013-aa52-697186f1b4b6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.248461] env[61473]: DEBUG oslo_vmware.api [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 2172.248461] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]522c1366-e8c4-d616-3d25-5a66afb49b97" [ 2172.248461] env[61473]: _type = "Task" [ 2172.248461] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.256090] env[61473]: DEBUG oslo_vmware.api [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]522c1366-e8c4-d616-3d25-5a66afb49b97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.759289] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2172.759289] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2172.759289] env[61473]: DEBUG oslo_concurrency.lockutils [None req-dec42031-393f-44ad-8d3e-5219199a5752 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2201.976298] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.966619] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.967228] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.967574] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.967723] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
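
The lock names above are datastore paths: keying the lock on "[datastore2] devstack-image-cache_base/<image-id>..." serializes concurrent builds of the same image while letting different images proceed in parallel. A sketch of that naming convention, illustrative apart from the path format taken from the log:

    # Serialize work per cached image by using its datastore path as the
    # lock name, as in the log above.
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(datastore, image_id):
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(lock_name):
            pass  # check the cache; download and convert only if absent
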
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 2206.962330] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.965994] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2208.966832] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2208.980066] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.980285] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.980456] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.980615] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2208.981727] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d7b91e-237f-498a-b9ce-39fc0f03265f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.991374] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ec28cf-b00b-4d59-a753-4119ed40c866 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.011099] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281db763-5fc8-4077-bdd9-6c5944f29255 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.017341] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8126faac-13fc-4224-a23e-26951583506b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.047590] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180634MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2209.047776] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.048044] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.126061] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.126235] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance a6532eba-0297-4320-9357-165e482c3790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.126362] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.126481] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.126603] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.126719] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.126837] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bf13952e-d219-4c77-9a73-ada311eeb053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.126955] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b35a272f-6743-4e9e-8181-4e704bb3aa06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.127090] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8a17da2e-1070-43a1-bc00-22d9b04dd806 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.127209] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9a8791e-2e0e-40a0-9562-12c8545b900a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2209.127393] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2209.127604] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=183GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2209.248327] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdfc022-fead-4547-9a62-1af62ece03e5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.255821] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4e596b-e31e-4e59-b39e-acc9a005beda {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.287198] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109f91e7-925a-489b-96d0-3a9d798eba2f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.293943] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab19f55-3786-4897-8399-e2a862754d16 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.306479] env[61473]: DEBUG 
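
The usage figures in the final resource view follow directly from the ten m1.nano allocations listed above (1 VCPU / 128 MEMORY_MB / 1 DISK_GB each) plus the 512 MB host reservation:

    # Back-of-envelope check of the "Final resource view" numbers.
    instances = 10
    used_ram = instances * 128 + 512   # 1792 MB, as logged
    used_disk = instances * 1          # 10 GB, as logged
    used_vcpus = instances * 1         # 10 of 48 total
    print(used_ram, used_disk, used_vcpus)
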
nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2209.314753] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2209.331906] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2209.332110] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.284s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.332283] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2211.332713] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2211.332713] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2211.353055] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.353258] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.353354] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.353480] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.353603] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.353726] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.353845] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.353969] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.354101] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.354221] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2211.354350] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2211.354824] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2216.481779] env[61473]: WARNING oslo_vmware.rw_handles [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2216.481779] env[61473]: ERROR oslo_vmware.rw_handles [ 2216.482439] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2216.483982] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2216.484243] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Copying Virtual Disk [datastore2] vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/a180fe42-c6ea-49e5-b5e4-dc1368b11733/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2216.484510] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f39662ef-ca74-4ff4-a422-4b5a96250606 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.492771] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 2216.492771] env[61473]: value = "task-4281736" [ 2216.492771] env[61473]: _type = "Task" [ 2216.492771] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.500863] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281736, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.002481] env[61473]: DEBUG oslo_vmware.exceptions [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2217.002779] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.003352] env[61473]: ERROR nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.003352] env[61473]: Faults: ['InvalidArgument'] [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Traceback (most recent call last): [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] yield resources [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] self.driver.spawn(context, instance, image_meta, [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] self._fetch_image_if_missing(context, vi) [ 2217.003352] env[61473]: ERROR nova.compute.manager 
[instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2217.003352] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] image_cache(vi, tmp_image_ds_loc) [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] vm_util.copy_virtual_disk( [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] session._wait_for_task(vmdk_copy_task) [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] return self.wait_for_task(task_ref) [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] return evt.wait() [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] result = hub.switch() [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] return self.greenlet.switch() [ 2217.003718] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2217.004013] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] self.f(*self.args, **self.kw) [ 2217.004013] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2217.004013] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] raise exceptions.translate_fault(task_info.error) [ 2217.004013] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.004013] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Faults: ['InvalidArgument'] [ 2217.004013] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] [ 2217.004013] env[61473]: INFO nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] 
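
The traceback ends in oslo_vmware's fault translation: the failed CopyVirtualDisk_Task carries the fault name 'InvalidArgument', which (per the earlier "Fault InvalidArgument not matched" line) has no dedicated exception class and so surfaces as a generic VimFaultException. A simplified sketch of that lookup, not oslo_vmware's actual registry:

    # Map a vSphere fault name to an exception class, falling back to a
    # generic fault exception when the name is not matched.
    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    _FAULT_CLASSES = {}   # e.g. {'FileNotFound': SomeSpecificException}

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:   # "Fault InvalidArgument not matched"
            return VimFaultException([fault_name], message)
        return cls(message)
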
[instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Terminating instance [ 2217.005219] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.005630] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2217.005702] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a141532-6d0f-417a-bba9-dd657acfbc20 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.008054] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2217.008363] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2217.008982] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60401619-3b80-4587-b066-a9f82627897a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.016345] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2217.017613] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44195047-f25d-4e9d-9197-8ce2c946dc6f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.019536] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2217.019829] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2217.020822] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90850567-5e7e-4d6f-91a9-1da274538ec9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.027622] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for the task: (returnval){ [ 2217.027622] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52bc2c1c-4456-0ab7-569d-01e9dcdfae9c" [ 2217.027622] env[61473]: _type = "Task" [ 2217.027622] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.039468] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52bc2c1c-4456-0ab7-569d-01e9dcdfae9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.095187] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2217.095415] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2217.095600] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleting the datastore file [datastore2] 6b12b76a-d5a3-4a60-98e6-b0329389ca75 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2217.095874] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-769932a7-2974-42f5-b037-6c8e10da3117 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.102227] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for the task: (returnval){ [ 2217.102227] env[61473]: value = "task-4281738" [ 2217.102227] env[61473]: _type = "Task" [ 2217.102227] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.109880] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281738, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.538235] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2217.538532] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Creating directory with path [datastore2] vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2217.538798] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1ccc8d1-4a25-48ac-a508-848b4ffbea3c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.549643] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Created directory with path [datastore2] vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2217.549866] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Fetch image to [datastore2] vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2217.550033] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2217.550746] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195c6760-db4d-4cd2-a530-36c6dccb61ba {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.558524] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7888aa5c-4f1b-4446-9e23-b181e973cb73 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.567377] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4afab5c-793f-4725-b56d-d2270aeec94d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.597410] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abade66-7d6d-4a0d-88a6-0e539de36d09 {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.602614] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-387233b3-7b18-4977-aec6-f9176a5b7742 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.611165] env[61473]: DEBUG oslo_vmware.api [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Task: {'id': task-4281738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062523} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.611413] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2217.611594] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2217.611763] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2217.611934] env[61473]: INFO nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Took 0.60 seconds to destroy the instance on the hypervisor. 
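
The run above is oslo.vmware's task-polling pattern end to end: Nova invokes FileManager.DeleteDatastoreFile_Task, receives a task reference, and wait_for_task (api.py:397) drives a looping call that re-reads the task (_poll_task, api.py:434), logging "progress is N%" until the task leaves the running states; on success the elapsed time is reported as duration_secs, on error the fault is translated into an exception. Below is a minimal, self-contained Python sketch of that poll-and-translate loop under stated assumptions: get_task_info and TaskFailed are hypothetical stand-ins for the vSphere Task read and the fault translation, not oslo.vmware's actual API.

    # Sketch of the poll-until-done loop behind the "Waiting for the task
    # ... progress is 0% ... completed successfully" records above.
    # get_task_info() is a hypothetical stand-in for reading the Task
    # object's 'info' property from vCenter; it is NOT the oslo.vmware API.
    import time


    class TaskFailed(Exception):
        """Stands in for the translated VimFaultException."""


    def get_task_info(task_ref):
        # Placeholder result; a real reader would return the live
        # state/progress/error of the vCenter task.
        return {"state": "success", "progress": 100, "error": None}


    def wait_for_task(task_ref, poll_interval=0.5):
        """Poll a task reference until it leaves the running states."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info["state"] in ("queued", "running"):
                # Corresponds to the "_poll_task ... progress is N%" lines.
                print(f"Task {task_ref}: progress is {info['progress']}%")
                time.sleep(poll_interval)
                continue
            if info["state"] == "error":
                # The fault is translated and raised here; this is where the
                # VimFaultException tracebacks further down originate.
                raise TaskFailed(info["error"])
            print(f"Task {task_ref} completed in {time.monotonic() - start:.6f}s")
            return info


    if __name__ == "__main__":
        wait_for_task("task-4281738")

The VimFaultException traceback that follows is this same loop raising on the 'error' state after a CopyVirtualDisk_Task fails.
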
[ 2217.613987] env[61473]: DEBUG nova.compute.claims [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2217.614174] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.614389] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.628291] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2217.679407] env[61473]: DEBUG oslo_vmware.rw_handles [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2217.738014] env[61473]: DEBUG oslo_vmware.rw_handles [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2217.738233] env[61473]: DEBUG oslo_vmware.rw_handles [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2217.825139] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c8de24-7c0f-4b05-9fee-f6d05b2f1047 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.832684] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1b18b6-856b-4cf5-b460-224326c20a3b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.864500] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08be2f7-e2c4-4539-b5df-87a01a22e3c4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.871398] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c65c9f-fa9a-4882-b693-5c7bf0529206 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.884265] env[61473]: DEBUG nova.compute.provider_tree [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2217.892686] env[61473]: DEBUG nova.scheduler.client.report [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2217.905758] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.291s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.906285] env[61473]: ERROR nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.906285] env[61473]: Faults: ['InvalidArgument'] [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Traceback (most recent call last): [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 
6b12b76a-d5a3-4a60-98e6-b0329389ca75] self.driver.spawn(context, instance, image_meta, [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] self._fetch_image_if_missing(context, vi) [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] image_cache(vi, tmp_image_ds_loc) [ 2217.906285] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] vm_util.copy_virtual_disk( [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] session._wait_for_task(vmdk_copy_task) [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] return self.wait_for_task(task_ref) [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] return evt.wait() [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] result = hub.switch() [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] return self.greenlet.switch() [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2217.906620] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] self.f(*self.args, **self.kw) [ 2217.906935] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2217.906935] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] raise exceptions.translate_fault(task_info.error) [ 2217.906935] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2217.906935] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Faults: ['InvalidArgument'] [ 2217.906935] env[61473]: ERROR nova.compute.manager [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] [ 2217.907093] env[61473]: DEBUG nova.compute.utils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2217.908411] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Build of instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 was re-scheduled: A specified parameter was not correct: fileType [ 2217.908411] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2217.908821] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2217.909007] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2217.909195] env[61473]: DEBUG nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2217.909361] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2218.284351] env[61473]: DEBUG nova.network.neutron [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.299600] env[61473]: INFO nova.compute.manager [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Took 0.39 seconds to deallocate network for instance. [ 2218.395595] env[61473]: INFO nova.scheduler.client.report [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Deleted allocations for instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 [ 2218.419141] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5aa1ea1f-fc96-41d9-8254-9940e373ad6d tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 545.626s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.419514] env[61473]: DEBUG oslo_concurrency.lockutils [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 349.533s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.419795] env[61473]: DEBUG oslo_concurrency.lockutils [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.420068] env[61473]: DEBUG oslo_concurrency.lockutils [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.420283] env[61473]: 
DEBUG oslo_concurrency.lockutils [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.422670] env[61473]: INFO nova.compute.manager [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Terminating instance [ 2218.424396] env[61473]: DEBUG nova.compute.manager [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2218.424682] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2218.425273] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23f9c063-d626-4011-a987-963ac1811312 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.437409] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0145b3-cad9-4da4-9001-ddb43c5d98ba {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.465500] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6b12b76a-d5a3-4a60-98e6-b0329389ca75 could not be found. [ 2218.465717] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2218.465896] env[61473]: INFO nova.compute.manager [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2218.466165] env[61473]: DEBUG oslo.service.loopingcall [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.466397] env[61473]: DEBUG nova.compute.manager [-] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2218.466494] env[61473]: DEBUG nova.network.neutron [-] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2218.494558] env[61473]: DEBUG nova.network.neutron [-] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.502931] env[61473]: INFO nova.compute.manager [-] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] Took 0.04 seconds to deallocate network for instance. [ 2218.592598] env[61473]: DEBUG oslo_concurrency.lockutils [None req-944011af-3187-403c-97c7-1f17aff434c9 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.173s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.593513] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 326.405s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.593713] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 6b12b76a-d5a3-4a60-98e6-b0329389ca75] During sync_power_state the instance has a pending task (deleting). Skip. 
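
The lock bookkeeping in this stretch is oslo.concurrency's lockutils at work: the synchronized wrapper ("inner" in lockutils.py) logs each named lock as it is acquired and released, including how long the caller waited and how long the lock was held. That is how the sequence above reads: the build path held lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" for 545.626s, do_terminate_instance waited 349.533s for the same name, and the per-instance "-events" lock is taken and dropped around clearing pending events. A rough Python sketch of that serialization pattern follows; build_instance and terminate_instance are hypothetical stand-ins for the Nova manager methods, while lockutils.lock is the real context manager.

    # Rough sketch: two workers serialized on an instance-UUID lock, the
    # pattern behind the "Acquiring lock ... / acquired ... waited Ns /
    # released ... held Ns" records above. The worker functions are
    # hypothetical; lockutils.lock is oslo.concurrency's real entry point.
    import threading
    import time

    from oslo_concurrency import lockutils

    INSTANCE_UUID = "6b12b76a-d5a3-4a60-98e6-b0329389ca75"


    def build_instance():
        with lockutils.lock(INSTANCE_UUID):
            # While build holds the lock, terminate blocks; in the log that
            # shows up as do_terminate_instance waiting 349.533s.
            time.sleep(1.0)


    def terminate_instance():
        start = time.monotonic()
        with lockutils.lock(INSTANCE_UUID):
            print(f"terminate waited {time.monotonic() - start:.3f}s")


    if __name__ == "__main__":
        t1 = threading.Thread(target=build_instance)
        t2 = threading.Thread(target=terminate_instance)
        t1.start()
        time.sleep(0.1)  # let build take the lock first
        t2.start()
        t1.join()
        t2.join()

By default lockutils.lock is an in-process semaphore keyed by name, which is all a single nova-compute worker needs; the same call accepts external=True when the lock must also serialize across processes.
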
[ 2218.593892] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "6b12b76a-d5a3-4a60-98e6-b0329389ca75" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.984678] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2220.460786] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "ba226385-7cc1-4a50-bff1-e40c11b51471" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.461326] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "ba226385-7cc1-4a50-bff1-e40c11b51471" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.472325] env[61473]: DEBUG nova.compute.manager [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 2220.523047] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.523313] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.524806] env[61473]: INFO nova.compute.claims [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2220.688507] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a355799-19d6-4579-9a9b-69a211434bfb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.696067] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b264e4ed-d5ad-4973-934b-c5741a054050 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.725491] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fada456b-e783-4a88-815e-9f3d8cca87da {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.732426] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd228960-25e6-4e1a-a60a-9cd5f7299300 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.746311] env[61473]: DEBUG nova.compute.provider_tree [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2220.755696] env[61473]: DEBUG nova.scheduler.client.report [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2220.769780] env[61473]: DEBUG oslo_concurrency.lockutils 
[None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.246s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.770273] env[61473]: DEBUG nova.compute.manager [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 2220.804282] env[61473]: DEBUG nova.compute.utils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2220.805719] env[61473]: DEBUG nova.compute.manager [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 2220.805892] env[61473]: DEBUG nova.network.neutron [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2220.815294] env[61473]: DEBUG nova.compute.manager [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 2220.879551] env[61473]: DEBUG nova.compute.manager [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Start spawning the instance on the hypervisor. 
{{(pid=61473) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2634}} [ 2220.886444] env[61473]: DEBUG nova.policy [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eda5c2e486864d80b4b3f1415a181dcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75fb9e80f03749519e953a48c30915c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 2220.905301] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2220.905414] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2220.905549] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2220.905850] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2220.906064] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2220.906265] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2220.906519] env[61473]: 
DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2220.906872] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2220.907491] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2220.907756] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2220.908042] env[61473]: DEBUG nova.virt.hardware [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2220.908971] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935260da-a625-447b-b36b-71062d986b01 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.917401] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfe47ee-0083-4a83-b910-a85e0fe6bc44 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.262163] env[61473]: DEBUG nova.network.neutron [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Successfully created port: 183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2221.809879] env[61473]: DEBUG nova.compute.manager [req-2c2a9b80-6a31-4167-9d15-9c7211903761 req-9f78da52-27ca-4495-a09d-d9b6233a2142 service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Received event network-vif-plugged-183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}} [ 2221.810145] env[61473]: DEBUG oslo_concurrency.lockutils [req-2c2a9b80-6a31-4167-9d15-9c7211903761 req-9f78da52-27ca-4495-a09d-d9b6233a2142 service nova] Acquiring lock "ba226385-7cc1-4a50-bff1-e40c11b51471-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.810335] env[61473]: DEBUG oslo_concurrency.lockutils [req-2c2a9b80-6a31-4167-9d15-9c7211903761 
req-9f78da52-27ca-4495-a09d-d9b6233a2142 service nova] Lock "ba226385-7cc1-4a50-bff1-e40c11b51471-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.810503] env[61473]: DEBUG oslo_concurrency.lockutils [req-2c2a9b80-6a31-4167-9d15-9c7211903761 req-9f78da52-27ca-4495-a09d-d9b6233a2142 service nova] Lock "ba226385-7cc1-4a50-bff1-e40c11b51471-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.810674] env[61473]: DEBUG nova.compute.manager [req-2c2a9b80-6a31-4167-9d15-9c7211903761 req-9f78da52-27ca-4495-a09d-d9b6233a2142 service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] No waiting events found dispatching network-vif-plugged-183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2 {{(pid=61473) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2221.810838] env[61473]: WARNING nova.compute.manager [req-2c2a9b80-6a31-4167-9d15-9c7211903761 req-9f78da52-27ca-4495-a09d-d9b6233a2142 service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Received unexpected event network-vif-plugged-183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2 for instance with vm_state building and task_state spawning. [ 2221.889065] env[61473]: DEBUG nova.network.neutron [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Successfully updated port: 183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2 {{(pid=61473) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2221.901531] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "refresh_cache-ba226385-7cc1-4a50-bff1-e40c11b51471" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2221.901704] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "refresh_cache-ba226385-7cc1-4a50-bff1-e40c11b51471" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2221.901862] env[61473]: DEBUG nova.network.neutron [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2221.948088] env[61473]: DEBUG nova.network.neutron [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Instance cache missing network info. 
{{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2222.357265] env[61473]: DEBUG nova.network.neutron [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Updating instance_info_cache with network_info: [{"id": "183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2", "address": "fa:16:3e:b8:42:b7", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap183e7a0a-0f", "ovs_interfaceid": "183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2222.372046] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Releasing lock "refresh_cache-ba226385-7cc1-4a50-bff1-e40c11b51471" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2222.372345] env[61473]: DEBUG nova.compute.manager [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Instance network_info: |[{"id": "183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2", "address": "fa:16:3e:b8:42:b7", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap183e7a0a-0f", "ovs_interfaceid": "183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61473) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1992}} [ 2222.372746] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:42:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2', 'vif_model': 'vmxnet3'}] {{(pid=61473) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2222.380504] env[61473]: DEBUG oslo.service.loopingcall [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2222.380970] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Creating VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2222.381229] env[61473]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7576ab5c-fe00-4ca3-a5c4-b1d1e80adcf0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.401197] env[61473]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2222.401197] env[61473]: value = "task-4281739" [ 2222.401197] env[61473]: _type = "Task" [ 2222.401197] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.408480] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281739, 'name': CreateVM_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.912879] env[61473]: DEBUG oslo_vmware.api [-] Task: {'id': task-4281739, 'name': CreateVM_Task, 'duration_secs': 0.266131} completed successfully. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.913232] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Created VM on the ESX host {{(pid=61473) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2222.913707] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2222.913877] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2222.914235] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2222.914482] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed7c048c-76fd-42c1-94d1-4c7267ed32eb {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.918807] env[61473]: DEBUG oslo_vmware.api [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for the task: (returnval){ [ 2222.918807] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]523be5db-d581-f013-edc1-bced28c82503" [ 2222.918807] env[61473]: _type = "Task" [ 2222.918807] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.926133] env[61473]: DEBUG oslo_vmware.api [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]523be5db-d581-f013-edc1-bced28c82503, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2223.429929] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2223.430187] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Processing image aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2223.430404] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b63e9e25-637b-4c63-b5da-d72044420419 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2223.838089] env[61473]: DEBUG nova.compute.manager [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Received event network-changed-183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2 {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11158}}
[ 2223.838293] env[61473]: DEBUG nova.compute.manager [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Refreshing instance network info cache due to event network-changed-183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2. {{(pid=61473) external_instance_event /opt/stack/nova/nova/compute/manager.py:11163}}
[ 2223.838508] env[61473]: DEBUG oslo_concurrency.lockutils [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] Acquiring lock "refresh_cache-ba226385-7cc1-4a50-bff1-e40c11b51471" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2223.838652] env[61473]: DEBUG oslo_concurrency.lockutils [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] Acquired lock "refresh_cache-ba226385-7cc1-4a50-bff1-e40c11b51471" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2223.838855] env[61473]: DEBUG nova.network.neutron [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Refreshing network info cache for port 183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2 {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2224.090699] env[61473]: DEBUG nova.network.neutron [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Updated VIF entry in instance network info cache for port 183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2. {{(pid=61473) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 2224.091340] env[61473]: DEBUG nova.network.neutron [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Updating instance_info_cache with network_info: [{"id": "183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2", "address": "fa:16:3e:b8:42:b7", "network": {"id": "773be5d3-32e0-4a3c-ae29-8e93a30b5454", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-840600896-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75fb9e80f03749519e953a48c30915c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap183e7a0a-0f", "ovs_interfaceid": "183e7a0a-0fd9-49a9-8b6d-767b85b5b2a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2224.101124] env[61473]: DEBUG oslo_concurrency.lockutils [req-0aa0f330-c057-4991-95af-6dd1e5052e14 req-c2216706-23b3-4ae2-9490-97a8f7d1187b service nova] Releasing lock "refresh_cache-ba226385-7cc1-4a50-bff1-e40c11b51471" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2261.643186] env[61473]: DEBUG oslo_concurrency.lockutils [None req-41eb223b-4efa-4c3d-ad4a-11eb7d0af222 tempest-ImagesTestJSON-1599981621 tempest-ImagesTestJSON-1599981621-project-member] Acquiring lock "bf13952e-d219-4c77-9a73-ada311eeb053" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2263.966101] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2265.966019] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2265.966385] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2265.966439] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}}
[ 2266.037402] env[61473]: WARNING oslo_vmware.rw_handles [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles response.begin()
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2266.037402] env[61473]: ERROR oslo_vmware.rw_handles
[ 2266.037832] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2266.039568] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2266.039810] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Copying Virtual Disk [datastore2] vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/75016de5-204c-46ab-8196-5124ab747047/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2266.040121] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f20fb58-2438-4a70-bb15-4c719cedc76c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2266.048336] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for the task: (returnval){
[ 2266.048336] env[61473]: value = "task-4281740"
[ 2266.048336] env[61473]: _type = "Task"
[ 2266.048336] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2266.055951] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Task: {'id': task-4281740, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2266.558670] env[61473]: DEBUG oslo_vmware.exceptions [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2266.558979] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2266.559577] env[61473]: ERROR nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2266.559577] env[61473]: Faults: ['InvalidArgument']
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] Traceback (most recent call last):
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] yield resources
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self.driver.spawn(context, instance, image_meta,
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self._fetch_image_if_missing(context, vi)
[ 2266.559577] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] image_cache(vi, tmp_image_ds_loc)
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] vm_util.copy_virtual_disk(
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] session._wait_for_task(vmdk_copy_task)
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] return self.wait_for_task(task_ref)
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] return evt.wait()
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] result = hub.switch()
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2266.559884] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] return self.greenlet.switch()
[ 2266.560209] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2266.560209] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self.f(*self.args, **self.kw)
[ 2266.560209] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2266.560209] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] raise exceptions.translate_fault(task_info.error)
[ 2266.560209] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2266.560209] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] Faults: ['InvalidArgument']
[ 2266.560209] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790]
[ 2266.560209] env[61473]: INFO nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Terminating instance
[ 2266.561441] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2266.561650] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2266.561881] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dab4d23e-ab40-42b5-86f4-7a5802e9e7e9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2266.565174] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}}
[ 2266.565368] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2266.566065] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fb6ae7-4ff5-4362-bb78-821298b1a318 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2266.572208] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2266.572410] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1afa8da-7a94-4c43-b194-f4075d9aa7df {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2266.574427] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2266.574599] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2266.575518] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bbccb09-1ed8-4e25-a12f-d8401c96238b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2266.580156] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Waiting for the task: (returnval){
[ 2266.580156] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]522a4814-42cc-80f2-b881-5b5290fd7366"
[ 2266.580156] env[61473]: _type = "Task"
[ 2266.580156] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2266.586867] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]522a4814-42cc-80f2-b881-5b5290fd7366, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2266.638935] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2266.639241] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2266.639476] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Deleting the datastore file [datastore2] a6532eba-0297-4320-9357-165e482c3790 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2266.639761] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef949ebf-a1bd-4cf5-8775-031d3bbf73a0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2266.645889] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for the task: (returnval){
[ 2266.645889] env[61473]: value = "task-4281742"
[ 2266.645889] env[61473]: _type = "Task"
[ 2266.645889] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2266.653551] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Task: {'id': task-4281742, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2266.966548] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2267.090514] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2267.090788] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Creating directory with path [datastore2] vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2267.091033] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6efb7861-d463-420a-b6ed-159accc1803f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.101505] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Created directory with path [datastore2] vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2267.101687] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Fetch image to [datastore2] vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2267.101860] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2267.102562] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50662e38-96a9-4141-9471-3474e733cc54 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.108743] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb922735-cb3f-4b76-93e5-ef4c1cca061c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.117370] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390796a5-7819-48cb-9fd6-d705b031afba {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.149576] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf8f95d-3523-463a-ab9f-164204d4bb7f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.155924] env[61473]: DEBUG oslo_vmware.api [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Task: {'id': task-4281742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07509} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2267.157345] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2267.157535] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2267.157707] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2267.157877] env[61473]: INFO nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Took 0.59 seconds to destroy the instance on the hypervisor.
[ 2267.159658] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a5da9bd7-4926-4455-b4a2-8222d015cb83 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.161473] env[61473]: DEBUG nova.compute.claims [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2267.161642] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2267.161854] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2267.186690] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2267.249256] env[61473]: DEBUG oslo_vmware.rw_handles [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2267.307401] env[61473]: DEBUG oslo_vmware.rw_handles [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2267.307585] env[61473]: DEBUG oslo_vmware.rw_handles [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2267.387422] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946be77c-863d-4654-8c76-a1a4d9028ce2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.394957] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8bec90a-a490-436e-8312-5587f7b5b944 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.425133] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247fded4-a6b4-44bc-840c-73717eca50d8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.431721] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fb5323-a574-40cc-826d-e4db59917a53 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2267.444202] env[61473]: DEBUG nova.compute.provider_tree [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2267.453073] env[61473]: DEBUG nova.scheduler.client.report [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 2267.467084] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2267.467596] env[61473]: ERROR nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2267.467596] env[61473]: Faults: ['InvalidArgument']
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] Traceback (most recent call last):
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self.driver.spawn(context, instance, image_meta,
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self._fetch_image_if_missing(context, vi)
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] image_cache(vi, tmp_image_ds_loc)
[ 2267.467596] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] vm_util.copy_virtual_disk(
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] session._wait_for_task(vmdk_copy_task)
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] return self.wait_for_task(task_ref)
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] return evt.wait()
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] result = hub.switch()
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] return self.greenlet.switch()
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2267.467922] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] self.f(*self.args, **self.kw)
[ 2267.468219] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2267.468219] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] raise exceptions.translate_fault(task_info.error)
[ 2267.468219] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2267.468219] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790] Faults: ['InvalidArgument']
[ 2267.468219] env[61473]: ERROR nova.compute.manager [instance: a6532eba-0297-4320-9357-165e482c3790]
[ 2267.468333] env[61473]: DEBUG nova.compute.utils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2267.469709] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Build of instance a6532eba-0297-4320-9357-165e482c3790 was re-scheduled: A specified parameter was not correct: fileType
[ 2267.469709] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}}
[ 2267.470121] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}}
[ 2267.470299] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}}
[ 2267.470472] env[61473]: DEBUG nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}}
[ 2267.470646] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2267.823960] env[61473]: DEBUG nova.network.neutron [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2267.834159] env[61473]: INFO nova.compute.manager [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Took 0.36 seconds to deallocate network for instance.
[ 2267.925722] env[61473]: INFO nova.scheduler.client.report [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Deleted allocations for instance a6532eba-0297-4320-9357-165e482c3790
[ 2267.951342] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b692b4fa-e213-45c8-a84b-e0c2b85cf7ba tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "a6532eba-0297-4320-9357-165e482c3790" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 558.509s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2267.951596] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "a6532eba-0297-4320-9357-165e482c3790" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 375.763s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2267.951785] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: a6532eba-0297-4320-9357-165e482c3790] During sync_power_state the instance has a pending task (spawning). Skip.
[ 2267.951956] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "a6532eba-0297-4320-9357-165e482c3790" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.952504] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "a6532eba-0297-4320-9357-165e482c3790" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 362.303s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.952700] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "a6532eba-0297-4320-9357-165e482c3790-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.952907] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "a6532eba-0297-4320-9357-165e482c3790-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.953092] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "a6532eba-0297-4320-9357-165e482c3790-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.955050] env[61473]: INFO nova.compute.manager [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Terminating instance [ 2267.957506] env[61473]: DEBUG nova.compute.manager [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2267.957700] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2267.957986] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a060d595-8130-4b5a-a021-51650b970a76 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.962143] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2267.966510] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2267.970254] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48c2567-9e1f-49d1-85d6-462b82017721 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.999500] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a6532eba-0297-4320-9357-165e482c3790 could not be found. [ 2267.999651] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2267.999922] env[61473]: INFO nova.compute.manager [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] [instance: a6532eba-0297-4320-9357-165e482c3790] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2268.000089] env[61473]: DEBUG oslo.service.loopingcall [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2268.000357] env[61473]: DEBUG nova.compute.manager [-] [instance: a6532eba-0297-4320-9357-165e482c3790] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2268.000454] env[61473]: DEBUG nova.network.neutron [-] [instance: a6532eba-0297-4320-9357-165e482c3790] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2268.025319] env[61473]: DEBUG nova.network.neutron [-] [instance: a6532eba-0297-4320-9357-165e482c3790] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.034497] env[61473]: INFO nova.compute.manager [-] [instance: a6532eba-0297-4320-9357-165e482c3790] Took 0.03 seconds to deallocate network for instance. [ 2268.126245] env[61473]: DEBUG oslo_concurrency.lockutils [None req-3a300533-0961-4f7f-b936-744ec27a3db6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Lock "a6532eba-0297-4320-9357-165e482c3790" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.966601] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2270.966864] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2270.978234] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.978456] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.978626] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.978787] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2270.979946] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5daa0311-e4d3-447a-96c8-98c00c6675bb {{(pid=61473) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.988573] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb600a3-ef71-4fd0-84b0-6ca7a7e21b5d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.002481] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ab4a3b-fe8e-485e-abe5-50b9bec9bcd4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.008367] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f007cd-3a02-4687-90e1-07513f4db0ba {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.038226] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180606MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2271.038366] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2271.038550] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2271.102275] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance db8f9797-0e07-422c-b0d5-562189fc3f3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.102427] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.102554] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.102677] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.102798] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bf13952e-d219-4c77-9a73-ada311eeb053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.102919] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b35a272f-6743-4e9e-8181-4e704bb3aa06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.103052] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8a17da2e-1070-43a1-bc00-22d9b04dd806 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.103173] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9a8791e-2e0e-40a0-9562-12c8545b900a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.103288] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ba226385-7cc1-4a50-bff1-e40c11b51471 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2271.103469] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2271.103605] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=183GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2271.207194] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47708a3f-8446-4a67-83b6-5e951defb45e {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.214825] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ba0928-3716-45a9-bdef-f5c1b4ce35fd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.244183] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb574041-6029-4fa6-a210-42ab915605b1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.251404] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6d06c1-dbad-40d1-b46b-da91b08c325a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.264038] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2271.272419] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2271.286915] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2271.287141] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.249s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2273.287618] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2273.287904] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2273.287940] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2273.306534] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.306691] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.306838] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.306964] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.307099] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.307252] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.307419] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.307546] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.307664] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Skipping network cache update for instance because it is Building. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2273.307782] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2298.862507] env[61473]: DEBUG oslo_concurrency.lockutils [None req-97e4723a-caed-4a3f-983d-532e0691a9f6 tempest-ServersTestJSON-1714720745 tempest-ServersTestJSON-1714720745-project-member] Acquiring lock "b35a272f-6743-4e9e-8181-4e704bb3aa06" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2316.518173] env[61473]: WARNING oslo_vmware.rw_handles [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2316.518173] env[61473]: ERROR oslo_vmware.rw_handles [ 2316.518846] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2316.520615] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2316.520874] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Copying Virtual Disk [datastore2] vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2]
vmware_temp/9f900e3f-b852-40c5-abe4-6032a144f25c/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2316.521188] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4de4b39e-72b2-4ce9-8cd8-c6861127fe3c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.528871] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Waiting for the task: (returnval){ [ 2316.528871] env[61473]: value = "task-4281743" [ 2316.528871] env[61473]: _type = "Task" [ 2316.528871] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2316.536832] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Task: {'id': task-4281743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.040080] env[61473]: DEBUG oslo_vmware.exceptions [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2317.040380] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2317.040925] env[61473]: ERROR nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2317.040925] env[61473]: Faults: ['InvalidArgument'] [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Traceback (most recent call last): [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] yield resources [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self.driver.spawn(context, instance, image_meta, [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] 
self._vmops.spawn(context, instance, image_meta, injected_files, [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._fetch_image_if_missing(context, vi) [ 2317.040925] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] image_cache(vi, tmp_image_ds_loc) [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] vm_util.copy_virtual_disk( [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] session._wait_for_task(vmdk_copy_task) [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.wait_for_task(task_ref) [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return evt.wait() [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] result = hub.switch() [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2317.041332] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.greenlet.switch() [ 2317.041704] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2317.041704] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self.f(*self.args, **self.kw) [ 2317.041704] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2317.041704] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] raise exceptions.translate_fault(task_info.error) [ 2317.041704] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not 
correct: fileType [ 2317.041704] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Faults: ['InvalidArgument'] [ 2317.041704] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] [ 2317.041704] env[61473]: INFO nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Terminating instance [ 2317.042827] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2317.043079] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2317.043320] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70e3a0b4-1346-490a-97cf-f931750bb34a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.045400] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2317.045558] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquired lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2317.045725] env[61473]: DEBUG nova.network.neutron [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2317.052644] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2317.052817] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Folder [datastore2] devstack-image-cache_base created. 
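
The spawn failure above follows oslo.vmware's standard task-polling path: CopyVirtualDisk_Task is polled ("progress is 0%"), vCenter flips it to an error state, and _poll_task raises the translated fault; "Fault InvalidArgument not matched" just means no dedicated exception class is registered for that fault, so the generic VimFaultException carries the "A specified parameter was not correct: fileType" message. A sketch of that loop's shape, with a hypothetical get_task_info callable standing in for the real vim bindings:

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info() is assumed to return an object with .state
        # ("running" | "success" | "error"), mirroring vCenter's TaskInfo.
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # The branch visible in the traceback above: the task error is
                # translated into a Python exception and raised to the caller.
                raise VimFaultException(info.error.faults, info.error.message)
            time.sleep(interval)  # still running; re-poll, as the log does
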
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2317.053996] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11c8419a-65e5-44ff-b663-fbfac5e774f0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.061393] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 2317.061393] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52be7a2d-ac7c-f74d-21e0-2f752f91d6ba" [ 2317.061393] env[61473]: _type = "Task" [ 2317.061393] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.068979] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52be7a2d-ac7c-f74d-21e0-2f752f91d6ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.116906] env[61473]: DEBUG nova.network.neutron [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2317.186596] env[61473]: DEBUG nova.network.neutron [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2317.197733] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Releasing lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2317.198173] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2317.198386] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2317.199493] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2383791-c0f7-4109-80dc-5406d951aec9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.208505] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2317.208731] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39d05079-9070-419c-86cc-e5ef241d86d8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.242819] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2317.243041] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2317.243233] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Deleting the datastore file [datastore2] db8f9797-0e07-422c-b0d5-562189fc3f3d {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2317.243475] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cf88aec-1eff-4a29-a402-8fecb5977587 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.249310] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Waiting for the task: (returnval){ [ 2317.249310] env[61473]: value = "task-4281745" [ 2317.249310] env[61473]: _type = "Task" [ 2317.249310] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.257292] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Task: {'id': task-4281745, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.571627] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2317.572022] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating directory with path [datastore2] vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2317.572076] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-675b7067-cf2f-4e35-95c5-f8a7e78d4033 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.582912] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Created directory with path [datastore2] vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2317.583109] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Fetch image to [datastore2] vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2317.583283] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2317.583968] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e439e66-a507-4631-b66e-c8b0979870ab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.590349] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606fa0f9-c2e5-4260-b2a5-2e997a5e400f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.598986] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c51ef73-75ec-45e9-aa26-2917d1669209 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.628329] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9ced3ea5-dc20-4c8d-b0fd-e028db192afe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.633439] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2f843b30-1466-44cc-8ccc-a1870d2fc889 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.653595] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2317.702702] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2317.761798] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2317.761999] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2317.765824] env[61473]: DEBUG oslo_vmware.api [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Task: {'id': task-4281745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031683} completed successfully. 
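
At the transport level, the image fetch above for 46b86ba3 is a single HTTPS PUT of the streamed Glance bytes to the ESX host's /folder endpoint, with the exact size (21318656) sent up front. A rough requests-based equivalent (illustrative only; the real path goes through oslo.vmware's rw_handles, and session_cookie stands in for the ticket obtained via AcquireGenericServiceTicket):

    import requests

    def upload_to_datastore(url, data_iter, size, session_cookie):
        # url has the shape https://<host>/folder/<path>?dcPath=...&dsName=...,
        # matching the write URL in the log above.
        resp = requests.put(
            url,
            data=data_iter,  # chunked generator; streamed, never fully buffered
            headers={
                "Content-Length": str(size),
                "Content-Type": "application/octet-stream",
            },
            cookies=session_cookie,  # vCenter session ticket
            verify=False,  # matches a lab setup; verify certificates in production
        )
        # The earlier RemoteDisconnected warning fired at the equivalent of this
        # point: reading the response after the server had already dropped the
        # connection, which the write handle logs but tolerates when the upload
        # itself completed.
        resp.raise_for_status()
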
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.766148] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2317.766359] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2317.766527] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2317.766791] env[61473]: INFO nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Took 0.57 seconds to destroy the instance on the hypervisor. [ 2317.766973] env[61473]: DEBUG oslo.service.loopingcall [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2317.767161] env[61473]: DEBUG nova.compute.manager [-] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network deallocation for instance since networking was not requested.
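
The "Waiting for function ... _deallocate_network_with_retries to return" entry shows that network teardown is wrapped in oslo.service's RetryDecorator, so transient Neutron errors are retried before anything is surfaced. The shape of that wrapper as a plain sketch (attempt count and delay here are illustrative, not Nova's configured values):

    import time

    def call_with_retries(func, attempts=3, delay=1.0, retriable=(Exception,)):
        # Re-invoke func on designated exceptions, the way RetryDecorator does
        # for the deallocation call above; on the final attempt the error escapes.
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except retriable:
                if attempt == attempts:
                    raise  # out of retries: the looping call reports failure
                time.sleep(delay)

When every attempt fails, the final exception is exactly what the "Dynamic interval looping call ... failed" entry further down records for the Neutron 401.
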
{{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2317.769312] env[61473]: DEBUG nova.compute.claims [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2317.769473] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.769680] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.935209] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6e65c7-212f-46e6-9a10-70cca78f93df {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.944264] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f553569-32df-47ce-8ef0-6de316dee2ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.982082] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86575ca-72c7-474e-a165-c767ac4b0d7d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.994105] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab791ce-c3bf-4b9b-aa53-78bb63fa1a4f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.011017] env[61473]: DEBUG nova.compute.provider_tree [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2318.017746] env[61473]: DEBUG nova.scheduler.client.report [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2318.035633] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c 
tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.266s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.036199] env[61473]: ERROR nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2318.036199] env[61473]: Faults: ['InvalidArgument'] [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Traceback (most recent call last): [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self.driver.spawn(context, instance, image_meta, [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._fetch_image_if_missing(context, vi) [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] image_cache(vi, tmp_image_ds_loc) [ 2318.036199] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] vm_util.copy_virtual_disk( [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] session._wait_for_task(vmdk_copy_task) [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.wait_for_task(task_ref) [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return evt.wait() [ 2318.036694] env[61473]: ERROR 
nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] result = hub.switch() [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.greenlet.switch() [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2318.036694] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self.f(*self.args, **self.kw) [ 2318.038318] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2318.038318] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] raise exceptions.translate_fault(task_info.error) [ 2318.038318] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2318.038318] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Faults: ['InvalidArgument'] [ 2318.038318] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] [ 2318.038318] env[61473]: DEBUG nova.compute.utils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2318.038712] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Build of instance db8f9797-0e07-422c-b0d5-562189fc3f3d was re-scheduled: A specified parameter was not correct: fileType [ 2318.038712] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2318.041536] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2318.041536] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2318.041536] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquired lock 
"refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2318.041536] env[61473]: DEBUG nova.network.neutron [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2318.076224] env[61473]: DEBUG nova.network.neutron [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2318.146231] env[61473]: DEBUG nova.network.neutron [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2318.155635] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Releasing lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2318.156010] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2318.156217] env[61473]: DEBUG nova.compute.manager [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2318.253080] env[61473]: INFO nova.scheduler.client.report [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Deleted allocations for instance db8f9797-0e07-422c-b0d5-562189fc3f3d [ 2318.274140] env[61473]: DEBUG oslo_concurrency.lockutils [None req-7bd6b09c-bdf8-4d52-9b36-21bcf1978b7c tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 605.414s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.274482] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 426.085s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.274589] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] During sync_power_state the instance has a pending task (spawning). Skip. [ 2318.274777] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.275018] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 409.415s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.275232] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "db8f9797-0e07-422c-b0d5-562189fc3f3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2318.275435] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.275781] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held
0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.279883] env[61473]: INFO nova.compute.manager [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Terminating instance [ 2318.281618] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquiring lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2318.281778] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Acquired lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2318.281960] env[61473]: DEBUG nova.network.neutron [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Building network info cache for instance {{(pid=61473) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2318.312471] env[61473]: DEBUG nova.network.neutron [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance cache missing network info. {{(pid=61473) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2318.378917] env[61473]: DEBUG nova.network.neutron [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2318.388448] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Releasing lock "refresh_cache-db8f9797-0e07-422c-b0d5-562189fc3f3d" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2318.388859] env[61473]: DEBUG nova.compute.manager [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Start destroying the instance on the hypervisor. 
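
The lock bookkeeping just above ("waited 426.085s", "held 605.414s") is worth reading closely: the terminate request queued behind the per-instance build lock for the entire failed spawn. oslo.concurrency emits those waited/held figures itself; a sketch of the same accounting as a context manager (the names here are ours, not lockutils'):

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name):
        # Reproduce the waited/held accounting seen in the lockutils entries.
        t0 = time.monotonic()
        with lock:
            print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

    # usage: serialize per-instance work the way the terminate path above does
    with timed_lock(threading.Lock(), "db8f9797-0e07-422c-b0d5-562189fc3f3d"):
        pass
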
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2318.389091] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2318.389590] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad0f2e49-0e54-4190-b260-857125496000 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.399216] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0130597-1ffd-4fe1-a270-f1b8b7762af2 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.427567] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db8f9797-0e07-422c-b0d5-562189fc3f3d could not be found. [ 2318.427758] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2318.427931] env[61473]: INFO nova.compute.manager [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2318.428180] env[61473]: DEBUG oslo.service.loopingcall [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
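
The 0.04-second destroy above works because InstanceNotFound from the backend is treated as "already gone" rather than as a failure: the first, failed build had already unregistered the VM, so this second terminate has nothing left to do on vSphere. A sketch of that idempotent pattern, with hypothetical lookup/delete helpers:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(lookup_vm, unregister_and_delete, instance_uuid):
        try:
            vm_ref = lookup_vm(instance_uuid)
        except InstanceNotFound:
            # Mirrors the WARNING above: missing on the backend means the
            # delete is already done, so report success instead of raising.
            print("Instance does not exist on backend; nothing to destroy")
            return
        unregister_and_delete(vm_ref)
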
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2318.428395] env[61473]: DEBUG nova.compute.manager [-] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2318.428490] env[61473]: DEBUG nova.network.neutron [-] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2318.550891] env[61473]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61473) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2318.551162] env[61473]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-2073befb-30a5-42aa-b587-ccd55f996546'] [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2318.551686] env[61473]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall result = f(*args,
**kwargs) [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2318.552301] env[61473]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2318.552780] env[61473]: ERROR oslo.service.loopingcall [ 2318.553188] env[61473]: ERROR nova.compute.manager [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2318.580646] env[61473]: ERROR nova.compute.manager [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Traceback (most recent call last): [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] ret = obj(*args, **kwargs) [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] exception_handler_v20(status_code, error_body) [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] raise client_exc(message=error_message, [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Neutron server returns request_ids: ['req-2073befb-30a5-42aa-b587-ccd55f996546'] [ 2318.580646] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] During handling of the above exception, another exception occurred: [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Traceback (most recent call last): [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File 
"/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._delete_instance(context, instance, bdms) [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._shutdown_instance(context, instance, bdms) [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._try_deallocate_network(context, instance, requested_networks) [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] with excutils.save_and_reraise_exception(): [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2318.581193] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self.force_reraise() [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] raise self.value [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] _deallocate_network_with_retries() [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return evt.wait() [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] result = hub.switch() [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.greenlet.switch() [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2318.581568] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] result = 
func(*self.args, **self.kw) [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] result = f(*args, **kwargs) [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._deallocate_network( [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self.network_api.deallocate_for_instance( [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] data = neutron.list_ports(**search_opts) [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] ret = obj(*args, **kwargs) [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.list('ports', self.ports_path, retrieve_all, [ 2318.581882] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] ret = obj(*args, **kwargs) [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] for r in self._pagination(collection, path, **params): [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] res = self.get(path, params=params) [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] ret = obj(*args, **kwargs) [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.retry_request("GET", action, body=body, [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] ret = obj(*args, **kwargs) [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2318.582243] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] return self.do_request(method, action, body=body, [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] ret = obj(*args, **kwargs) [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] self._handle_fault_response(status_code, replybody, resp) [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2318.582605] env[61473]: ERROR nova.compute.manager [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] [ 2318.606692] env[61473]: DEBUG oslo_concurrency.lockutils [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Lock "db8f9797-0e07-422c-b0d5-562189fc3f3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.332s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.650716] env[61473]: INFO nova.compute.manager [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] [instance: db8f9797-0e07-422c-b0d5-562189fc3f3d] Successfully reverted task state from None on failure for instance. [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server [None req-b57ad265-12a3-4149-9088-887a1d37ad69 tempest-ServerShowV254Test-32043958 tempest-ServerShowV254Test-32043958-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-2073befb-30a5-42aa-b587-ccd55f996546'] [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2318.653873] env[61473]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2318.654356] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3349, in terminate_instance [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in do_terminate_instance [ 2318.654841] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3337, in do_terminate_instance [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3272, in _delete_instance [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3166, in _shutdown_instance [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3080, in _try_deallocate_network [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server raise self.value [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3078, in _try_deallocate_network [ 2318.655272] env[61473]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3067, in _deallocate_network_with_retries [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2287, in _deallocate_network [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2318.655709] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.655709] env[61473]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2318.656192] env[61473]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2318.656678] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2318.656678] env[61473]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2318.656678] env[61473]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2318.656678] env[61473]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2318.656678] env[61473]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
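The 401 above is Keystone rejecting the service credentials Nova loads from the [neutron] section of nova.conf, exactly as the "please verify Neutron admin credential located in nova.conf" hint says; because the failure occurs inside the RetryDecorator-wrapped _deallocate_network_with_retries helper, the same NeutronAdminCredentialConfigurationInvalid is reported three times as it propagates (oslo.service.loopingcall, nova.compute.manager, oslo_messaging.rpc.server). A minimal out-of-band way to check those credentials with keystoneauth1 is sketched below; every endpoint and account value is a placeholder assumption, not something taken from this log.

    # Sketch (assumptions marked): validate the [neutron] service credentials
    # against Keystone without going through Nova.
    from keystoneauth1 import session
    from keystoneauth1.identity import v3

    auth = v3.Password(
        auth_url="https://keystone.example.test/v3",  # placeholder for [neutron]/auth_url
        username="neutron",                           # placeholder for [neutron]/username
        password="REDACTED",                          # placeholder for [neutron]/password
        project_name="service",                       # placeholder for [neutron]/project_name
        user_domain_name="Default",                   # placeholder domain values
        project_domain_name="Default",
    )
    sess = session.Session(auth=auth)
    # Raises keystoneauth1.exceptions.http.Unauthorized on the same 401 as above;
    # on success, prints a token and the network endpoint Nova would use.
    print(sess.get_token())
    print(sess.get_endpoint(service_type="network", interface="public"))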
[ 2318.656678] env[61473]: ERROR oslo_messaging.rpc.server [ 2319.017477] env[61473]: DEBUG oslo_concurrency.lockutils [None req-73b387c4-0602-4b07-ac37-cb29c6bec7a7 tempest-ServerTagsTestJSON-1162836989 tempest-ServerTagsTestJSON-1162836989-project-member] Acquiring lock "8a17da2e-1070-43a1-bc00-22d9b04dd806" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2325.967326] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.967604] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.967699] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 2327.961741] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2327.965380] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2328.966619] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.966866] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.966097] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.966635] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2331.978625] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2331.978838] env[61473]: DEBUG oslo_concurrency.lockutils [None
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2331.979011] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.979203] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2331.980292] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81fe0af7-76f2-410d-9c1e-3c25af6d6a9f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.988901] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90816a9b-870d-4147-b9cb-74bc011878b8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.002932] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febcbf4e-ac6d-4d8e-a60d-f8be1e6625a5 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.008956] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86094941-84a9-47bd-88d5-e4b4236244f7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.038411] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180652MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2332.038547] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.038736] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.101786] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.101945] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.102095] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.102225] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bf13952e-d219-4c77-9a73-ada311eeb053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.102344] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b35a272f-6743-4e9e-8181-4e704bb3aa06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.102460] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8a17da2e-1070-43a1-bc00-22d9b04dd806 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.102579] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9a8791e-2e0e-40a0-9562-12c8545b900a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.102696] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ba226385-7cc1-4a50-bff1-e40c11b51471 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2332.102870] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2332.103010] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=183GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2332.198067] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf61bc2-16ce-400e-9639-e985323c37d8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.205738] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d0cd1b-da7c-4906-9d64-54ef995d6b1a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.234820] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6907dcb3-cb6d-471b-8036-0c7af5d1756c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.242931] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517bb2a7-f13c-429d-8821-77f7459d725d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.257653] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2332.265991] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2332.280271] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2332.280452] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.242s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.280538] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.280870] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2333.280870] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2333.298271] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.298443] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.298588] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.298715] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.298839] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.298960] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.299103] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.299223] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2333.299345] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2343.982874] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.791733] env[61473]: DEBUG oslo_concurrency.lockutils [None req-64ee26b3-3c4d-4c57-9a4d-e72d5ff4f8d6 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "f9a8791e-2e0e-40a0-9562-12c8545b900a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2366.534171] env[61473]: WARNING oslo_vmware.rw_handles [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2366.534171] env[61473]: ERROR oslo_vmware.rw_handles [ 2366.534799] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2366.537073] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2366.537365] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Copying Virtual Disk [datastore2]
vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/285e3de1-07d5-4142-84b5-33b78ff0164a/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2366.537677] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e645abe-88fd-4677-8288-6af1e01f1581 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.547856] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 2366.547856] env[61473]: value = "task-4281746" [ 2366.547856] env[61473]: _type = "Task" [ 2366.547856] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2366.555454] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': task-4281746, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.058298] env[61473]: DEBUG oslo_vmware.exceptions [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2367.058493] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2367.059038] env[61473]: ERROR nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.059038] env[61473]: Faults: ['InvalidArgument'] [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Traceback (most recent call last): [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] yield resources [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self.driver.spawn(context, instance, image_meta, [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] 
File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self._fetch_image_if_missing(context, vi) [ 2367.059038] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] image_cache(vi, tmp_image_ds_loc) [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] vm_util.copy_virtual_disk( [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] session._wait_for_task(vmdk_copy_task) [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] return self.wait_for_task(task_ref) [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] return evt.wait() [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] result = hub.switch() [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2367.059387] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] return self.greenlet.switch() [ 2367.059697] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2367.059697] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self.f(*self.args, **self.kw) [ 2367.059697] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2367.059697] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] raise 
exceptions.translate_fault(task_info.error) [ 2367.059697] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.059697] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Faults: ['InvalidArgument'] [ 2367.059697] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] [ 2367.059697] env[61473]: INFO nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Terminating instance [ 2367.061025] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2367.061228] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2367.061468] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bb2159d-3827-4f09-b5d7-c1296ead3e52 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.063583] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2367.063782] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2367.064526] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac8e80e-1aca-420a-9be9-a311614a9852 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.072091] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2367.072300] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29c18ae7-f5d1-4eae-800a-a67f864c3694 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.074298] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2367.074475] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2367.075385] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f44cca9-00a1-4a3e-9c40-dbfad817f5ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.079898] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 2367.079898] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]525d716e-95d9-0f27-e115-981f607de0fe" [ 2367.079898] env[61473]: _type = "Task" [ 2367.079898] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.086837] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]525d716e-95d9-0f27-e115-981f607de0fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.136061] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2367.136321] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2367.136509] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Deleting the datastore file [datastore2] 46b86ba3-99de-4493-b066-0a99bc2d2f27 {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2367.136776] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d56fe89b-6859-48e5-b52a-352aa52311ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.146201] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for the task: (returnval){ [ 2367.146201] env[61473]: value = "task-4281748" [ 2367.146201] env[61473]: _type = "Task" [ 2367.146201] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.153837] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': task-4281748, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.590721] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2367.590999] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating directory with path [datastore2] vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2367.591300] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bfda7ba-300a-4b1d-8ae1-27782fec7cfd {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.602297] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Created directory with path [datastore2] vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2367.602531] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Fetch image to [datastore2] vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2367.602767] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2367.603490] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9dbfd1-c376-4e2e-8c11-d37aeca7c8ab {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.609743] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dc4359-25fa-4c14-b395-6e7dfa57f17a {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.618401] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8c7017-9be9-4d28-adef-415f237d66e7 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.651381] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52eb7717-9e55-4d27-a06c-97743454bc95 
{{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.659448] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5b1dfed1-a820-4774-8a7b-b4017cf2c084 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.661032] env[61473]: DEBUG oslo_vmware.api [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Task: {'id': task-4281748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07991} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2367.661273] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2367.661453] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2367.661627] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2367.661798] env[61473]: INFO nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2367.663962] env[61473]: DEBUG nova.compute.claims [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2367.664140] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2367.664353] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2367.683015] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2367.733861] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2367.794175] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2367.794419] env[61473]: DEBUG oslo_vmware.rw_handles [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2367.869372] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c01b44-9b59-4721-9f61-9c9858c24948 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.876257] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80958a76-dd3f-41e1-a8b0-b662b2b21a03 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.908476] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c302fb-7d62-4617-aac3-c8a5da65afee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.915378] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932312da-1378-4b39-b75c-a9649d425c82 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.927985] env[61473]: DEBUG nova.compute.provider_tree [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2367.936829] env[61473]: DEBUG nova.scheduler.client.report [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2367.951232] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.287s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2367.951794] env[61473]: ERROR nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.951794] env[61473]: Faults: ['InvalidArgument'] [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Traceback (most recent call last): [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2367.951794] 
env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self.driver.spawn(context, instance, image_meta, [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self._fetch_image_if_missing(context, vi) [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] image_cache(vi, tmp_image_ds_loc) [ 2367.951794] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] vm_util.copy_virtual_disk( [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] session._wait_for_task(vmdk_copy_task) [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] return self.wait_for_task(task_ref) [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] return evt.wait() [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] result = hub.switch() [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] return self.greenlet.switch() [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2367.952238] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] self.f(*self.args, **self.kw) [ 2367.952566] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2367.952566] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] raise exceptions.translate_fault(task_info.error) [ 2367.952566] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2367.952566] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Faults: ['InvalidArgument'] [ 2367.952566] env[61473]: ERROR nova.compute.manager [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] [ 2367.952566] env[61473]: DEBUG nova.compute.utils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2367.954392] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Build of instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 was re-scheduled: A specified parameter was not correct: fileType [ 2367.954392] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2367.954773] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2367.954949] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2367.955137] env[61473]: DEBUG nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2367.955307] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2368.423960] env[61473]: DEBUG nova.network.neutron [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2368.438233] env[61473]: INFO nova.compute.manager [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Took 0.48 seconds to deallocate network for instance. [ 2368.533951] env[61473]: INFO nova.scheduler.client.report [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Deleted allocations for instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 [ 2368.556761] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ffb7c21d-4316-473f-8ddd-2b4640c7f04e tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 642.572s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2368.557016] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 476.368s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.557235] env[61473]: INFO nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2368.557417] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2368.557903] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 446.657s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.558133] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Acquiring lock "46b86ba3-99de-4493-b066-0a99bc2d2f27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2368.558342] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2368.558506] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2368.560393] env[61473]: INFO nova.compute.manager [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Terminating instance [ 2368.562067] env[61473]: DEBUG nova.compute.manager [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Start destroying the instance on the hypervisor. 
{{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2368.562267] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2368.562527] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d5eba52-0a88-4c05-a05b-90b56bdf5f4c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.572999] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da7bb03-0c07-46b0-9599-40f1543f37ac {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.602066] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 46b86ba3-99de-4493-b066-0a99bc2d2f27 could not be found. [ 2368.602332] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2368.602472] env[61473]: INFO nova.compute.manager [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2368.602713] env[61473]: DEBUG oslo.service.loopingcall [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2368.602932] env[61473]: DEBUG nova.compute.manager [-] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2368.603040] env[61473]: DEBUG nova.network.neutron [-] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2368.636458] env[61473]: DEBUG nova.network.neutron [-] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2368.645968] env[61473]: INFO nova.compute.manager [-] [instance: 46b86ba3-99de-4493-b066-0a99bc2d2f27] Took 0.04 seconds to deallocate network for instance. 
[ 2368.756885] env[61473]: DEBUG oslo_concurrency.lockutils [None req-a5d14b50-0e6d-4077-9e02-31d2c441b3b7 tempest-AttachInterfacesTestJSON-905198513 tempest-AttachInterfacesTestJSON-905198513-project-member] Lock "46b86ba3-99de-4493-b066-0a99bc2d2f27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2387.966927] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.967295] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.967424] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.967577] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 2389.966875] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2390.967352] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2391.966795] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2391.967056] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2391.967226] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2391.978816] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2391.979148] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2391.979207] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2391.979398] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2391.980935] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22efccc3-a931-4f91-babe-d7a8fbee8e53 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.989318] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac98eb8-95fb-4804-a532-db9e9be688f6 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.002897] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2c0474-9bb6-4acf-ac26-528ddfec4f3c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.009064] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee526b7c-fbe7-471e-bfbb-7b085c56b5fe {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.039125] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180647MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2392.039287] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2392.039519] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2392.102307] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2392.102469] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2392.102604] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bf13952e-d219-4c77-9a73-ada311eeb053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2392.102716] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b35a272f-6743-4e9e-8181-4e704bb3aa06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2392.102880] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8a17da2e-1070-43a1-bc00-22d9b04dd806 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2392.102957] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9a8791e-2e0e-40a0-9562-12c8545b900a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2392.103087] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ba226385-7cc1-4a50-bff1-e40c11b51471 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2392.103315] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2392.103485] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=183GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2392.191529] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e00980-b021-4588-821b-d8142bc92323 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.199074] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87319ab-4626-41d0-b815-0bfafa36672d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.228144] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3a3ff8-7b8f-4349-a61a-3d8fc54ece4b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.234780] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641f42ab-4b1d-4b72-ad73-61d899738460 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.247516] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2392.256843] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2392.269908] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2392.270097] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.231s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2393.269592] env[61473]: DEBUG oslo_service.periodic_task [None 
req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2393.269895] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2393.269972] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2393.288279] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2393.288476] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2393.288571] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2393.288690] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2393.288814] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2393.288934] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2393.289064] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2393.289187] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2415.718056] env[61473]: WARNING oslo_vmware.rw_handles [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles response.begin() [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2415.718056] env[61473]: ERROR oslo_vmware.rw_handles [ 2415.719114] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Downloaded image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2415.720523] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Caching image {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2415.720788] env[61473]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Copying Virtual Disk [datastore2] vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk to [datastore2] vmware_temp/5327a171-c1a4-4f9d-821e-8b577f6b94e6/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk {{(pid=61473) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2415.721096] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a21756de-a07e-450b-85b0-07bf7ec88367 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.730055] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: 
(returnval){ [ 2415.730055] env[61473]: value = "task-4281749" [ 2415.730055] env[61473]: _type = "Task" [ 2415.730055] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2415.737658] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281749, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.240268] env[61473]: DEBUG oslo_vmware.exceptions [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Fault InvalidArgument not matched. {{(pid=61473) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2416.240586] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2416.241236] env[61473]: ERROR nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2416.241236] env[61473]: Faults: ['InvalidArgument'] [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Traceback (most recent call last): [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/compute/manager.py", line 2890, in _build_resources [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] yield resources [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self.driver.spawn(context, instance, image_meta, [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self._fetch_image_if_missing(context, vi) [ 2416.241236] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 
7ef374d2-9dfc-420b-84f6-8dbcc8af59db] image_cache(vi, tmp_image_ds_loc) [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] vm_util.copy_virtual_disk( [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] session._wait_for_task(vmdk_copy_task) [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] return self.wait_for_task(task_ref) [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] return evt.wait() [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] result = hub.switch() [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2416.241763] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] return self.greenlet.switch() [ 2416.242287] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2416.242287] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self.f(*self.args, **self.kw) [ 2416.242287] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2416.242287] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] raise exceptions.translate_fault(task_info.error) [ 2416.242287] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2416.242287] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Faults: ['InvalidArgument'] [ 2416.242287] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] [ 2416.242287] env[61473]: INFO nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Terminating instance [ 2416.244254] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e 
tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aa35b7fc-44b5-479c-b6c8-60930c581f0d/aa35b7fc-44b5-479c-b6c8-60930c581f0d.vmdk" {{(pid=61473) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2416.244530] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2416.245114] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2416.245307] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2416.245524] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2a2be55-423d-43fd-9964-93f398628e6b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.247718] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd9e23a-80c3-4919-9345-2217bdaeb4b0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.253838] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Unregistering the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2416.254048] env[61473]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf9eb466-a4e1-4a7d-b8de-af283f837d16 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.255967] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2416.256156] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61473) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2416.257207] env[61473]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe329073-89b6-4223-b4a8-c3176ab31d28 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.261444] env[61473]: DEBUG oslo_vmware.api [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Waiting for the task: (returnval){ [ 2416.261444] env[61473]: value = "session[52845204-529b-c7da-d5bb-e4686d5692e5]52a72398-abf6-3ac3-fd85-fbe2c235712a" [ 2416.261444] env[61473]: _type = "Task" [ 2416.261444] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.268974] env[61473]: DEBUG oslo_vmware.api [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Task: {'id': session[52845204-529b-c7da-d5bb-e4686d5692e5]52a72398-abf6-3ac3-fd85-fbe2c235712a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.321333] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Unregistered the VM {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2416.321570] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Deleting contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2416.321735] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleting the datastore file [datastore2] 7ef374d2-9dfc-420b-84f6-8dbcc8af59db {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2416.322075] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96ca4673-e772-4ab7-bc3b-9b79217c3b80 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.327694] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for the task: (returnval){ [ 2416.327694] env[61473]: value = "task-4281751" [ 2416.327694] env[61473]: _type = "Task" [ 2416.327694] env[61473]: } to complete. {{(pid=61473) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.334975] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281751, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.560036] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ef1ddf96-50fa-49f6-8ae0-3dec8e4b7389 tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Acquiring lock "ba226385-7cc1-4a50-bff1-e40c11b51471" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.772138] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Preparing fetch location {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2416.772464] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating directory with path [datastore2] vmware_temp/0c0b904d-7717-444c-ae76-66c7025f8ec0/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2416.772579] env[61473]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b11107f0-6926-4142-a9f9-59c3340ada0f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.783304] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Created directory with path [datastore2] vmware_temp/0c0b904d-7717-444c-ae76-66c7025f8ec0/aa35b7fc-44b5-479c-b6c8-60930c581f0d {{(pid=61473) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2416.783493] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Fetch image to [datastore2] vmware_temp/0c0b904d-7717-444c-ae76-66c7025f8ec0/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk {{(pid=61473) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2416.783665] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to [datastore2] vmware_temp/0c0b904d-7717-444c-ae76-66c7025f8ec0/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61473) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2416.784415] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4261d62-5bea-4865-a03b-c7674bb8f18c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.791188] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a34088-7f23-478c-ab2f-361c9515abbd {{(pid=61473) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.800678] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fee23d-8484-4d11-9b50-35a7a905d9ee {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.835669] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16e676f-1bab-490b-9be5-8631fe7e96d0 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.843332] env[61473]: DEBUG oslo_vmware.api [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Task: {'id': task-4281751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07471} completed successfully. {{(pid=61473) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2416.844769] env[61473]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleted the datastore file {{(pid=61473) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2416.844966] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Deleted contents of the VM from datastore datastore2 {{(pid=61473) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2416.845155] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2416.845327] env[61473]: INFO nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Took 0.60 seconds to destroy the instance on the hypervisor. 
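The sequence above (CopyVirtualDisk_Task progress polling, "Fault InvalidArgument not matched.", then VimFaultException) is oslo.vmware's poll-and-translate loop: _poll_task re-reads the task state until it leaves the queued/running states, and a task error is converted into a Python exception that wait_for_task re-raises in the caller. Below is a minimal, self-contained sketch of that pattern; the helper names, the dict-shaped task info, and the poll interval are illustrative stand-ins, not the oslo.vmware API.

```python
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def translate_fault(error):
    # oslo.vmware first tries to match a registered fault class; the
    # "Fault InvalidArgument not matched." debug line above means no
    # specific class existed, so a generic VimFaultException is built.
    return VimFaultException([error["fault"]], error["message"])


def wait_for_task(poll_task_info, poll_interval=0.5):
    """Poll task info until it succeeds or errors, as in api.py above."""
    while True:
        info = poll_task_info()  # stands in for one property-collector poll
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise translate_fault(info["error"])
        time.sleep(poll_interval)  # still queued/running; poll again


if __name__ == "__main__":
    # Two polls: first still running (progress 0%), then the error seen above.
    states = iter([
        {"state": "running"},
        {"state": "error",
         "error": {"fault": "InvalidArgument",
                   "message": "A specified parameter was not correct: fileType"}},
    ])
    try:
        wait_for_task(lambda: next(states))
    except VimFaultException as exc:
        print("Faults:", exc.fault_list, "-", exc)
```

Once the exception reaches the compute manager, the log shows the standard cleanup path for a failed spawn: unregister the half-built VM, delete its datastore directory, abort the resource claim, and hand the build back to the scheduler.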
[ 2416.847158] env[61473]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-512deefa-4979-48f6-82e0-2df21605ce76 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.849098] env[61473]: DEBUG nova.compute.claims [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Aborting claim: {{(pid=61473) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2416.849274] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.849491] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2416.870123] env[61473]: DEBUG nova.virt.vmwareapi.images [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Downloading image file data aa35b7fc-44b5-479c-b6c8-60930c581f0d to the data store datastore2 {{(pid=61473) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2416.933637] env[61473]: DEBUG oslo_vmware.rw_handles [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0c0b904d-7717-444c-ae76-66c7025f8ec0/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61473) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2416.996367] env[61473]: DEBUG oslo_vmware.rw_handles [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Completed reading data from the image iterator. {{(pid=61473) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2416.996563] env[61473]: DEBUG oslo_vmware.rw_handles [None req-5025429b-cec6-4ee7-aaac-4f52fa3d566e tempest-AttachVolumeNegativeTest-852519561 tempest-AttachVolumeNegativeTest-852519561-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0c0b904d-7717-444c-ae76-66c7025f8ec0/aa35b7fc-44b5-479c-b6c8-60930c581f0d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61473) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2417.062072] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb17da8-d220-455a-9ef1-ac3125222597 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.069703] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c786af-ad0d-472a-b3c8-d50982222306 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.098507] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7ea4e7-e074-40e1-94e4-67b4dc1c34d1 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.105421] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3078a3-19bc-4930-897a-036874e8f7ce {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.119061] env[61473]: DEBUG nova.compute.provider_tree [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2417.129408] env[61473]: DEBUG nova.scheduler.client.report [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2417.144132] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.294s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2417.144683] env[61473]: ERROR nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2417.144683] env[61473]: Faults: ['InvalidArgument'] [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Traceback (most recent call last): [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/compute/manager.py", line 2637, in _build_and_run_instance [ 2417.144683] env[61473]: ERROR 
nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self.driver.spawn(context, instance, image_meta, [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self._fetch_image_if_missing(context, vi) [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] image_cache(vi, tmp_image_ds_loc) [ 2417.144683] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] vm_util.copy_virtual_disk( [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] session._wait_for_task(vmdk_copy_task) [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] return self.wait_for_task(task_ref) [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] return evt.wait() [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] result = hub.switch() [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] return self.greenlet.switch() [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2417.145056] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] self.f(*self.args, **self.kw) [ 2417.145358] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2417.145358] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] raise exceptions.translate_fault(task_info.error) [ 2417.145358] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2417.145358] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Faults: ['InvalidArgument'] [ 2417.145358] env[61473]: ERROR nova.compute.manager [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] [ 2417.145471] env[61473]: DEBUG nova.compute.utils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] VimFaultException {{(pid=61473) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2417.147351] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Build of instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db was re-scheduled: A specified parameter was not correct: fileType [ 2417.147351] env[61473]: Faults: ['InvalidArgument'] {{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2476}} [ 2417.147663] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Unplugging VIFs for instance {{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3002}} [ 2417.147663] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61473) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3025}} [ 2417.147808] env[61473]: DEBUG nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2417.147975] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2417.436730] env[61473]: DEBUG nova.network.neutron [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2417.449086] env[61473]: INFO nova.compute.manager [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Took 0.30 seconds to deallocate network for instance. [ 2417.551025] env[61473]: INFO nova.scheduler.client.report [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Deleted allocations for instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db [ 2417.571683] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba71a5c2-4057-41b0-a2eb-650806a77202 tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 651.987s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2417.571982] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 455.792s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2417.572226] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Acquiring lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2417.572436] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2417.572867] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2417.574749] env[61473]: INFO nova.compute.manager [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Terminating instance [ 2417.576379] env[61473]: DEBUG nova.compute.manager [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Start destroying the instance on the hypervisor. {{(pid=61473) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3146}} [ 2417.576560] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Destroying instance {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2417.577104] env[61473]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06a6f255-7e0b-4748-ac96-171276898479 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.588881] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4a7af4-c171-43b4-b4c4-de8b3e8adbd9 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.615220] env[61473]: WARNING nova.virt.vmwareapi.vmops [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7ef374d2-9dfc-420b-84f6-8dbcc8af59db could not be found. [ 2417.615441] env[61473]: DEBUG nova.virt.vmwareapi.vmops [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Instance destroyed {{(pid=61473) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2417.615619] env[61473]: INFO nova.compute.manager [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2417.615857] env[61473]: DEBUG oslo.service.loopingcall [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61473) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2417.616339] env[61473]: DEBUG nova.compute.manager [-] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Deallocating network for instance {{(pid=61473) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2417.616442] env[61473]: DEBUG nova.network.neutron [-] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] deallocate_for_instance() {{(pid=61473) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2417.646046] env[61473]: DEBUG nova.network.neutron [-] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Updating instance_info_cache with network_info: [] {{(pid=61473) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2417.654535] env[61473]: INFO nova.compute.manager [-] [instance: 7ef374d2-9dfc-420b-84f6-8dbcc8af59db] Took 0.04 seconds to deallocate network for instance. [ 2417.741108] env[61473]: DEBUG oslo_concurrency.lockutils [None req-5507fb1f-f851-416e-8091-dd61159caacf tempest-DeleteServersTestJSON-2114412851 tempest-DeleteServersTestJSON-2114412851-project-member] Lock "7ef374d2-9dfc-420b-84f6-8dbcc8af59db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.968701] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2447.974367] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2449.962262] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2449.965794] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2449.965909] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61473) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10574}} [ 2451.967068] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2451.967068] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2451.967513] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.966590] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.966831] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager.update_available_resource {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.979893] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2452.980201] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.980305] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2452.980486] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61473) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2452.981577] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298e0756-8cf9-4fe7-9084-8d989b386e18 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.990300] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264e9bbc-0db4-4b49-984d-84bfd40397de {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.003796] env[61473]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0961c76a-e8a3-4b43-ba79-11ac85a34866 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.009843] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e30a27-fc1c-4865-a01e-df8f7409d02b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.037940] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180654MB free_disk=180GB free_vcpus=48 pci_devices=None {{(pid=61473) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2453.038110] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2453.038306] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2453.116131] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 2f33fc61-3ea2-4818-918a-76cdae031a79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2453.116301] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance bf13952e-d219-4c77-9a73-ada311eeb053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2453.116428] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance b35a272f-6743-4e9e-8181-4e704bb3aa06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2453.116547] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance 8a17da2e-1070-43a1-bc00-22d9b04dd806 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2453.116663] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance f9a8791e-2e0e-40a0-9562-12c8545b900a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2453.116777] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Instance ba226385-7cc1-4a50-bff1-e40c11b51471 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61473) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2453.116958] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2453.117107] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=183GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61473) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2453.132733] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing inventories for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2453.145208] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating ProviderTree inventory for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2453.145390] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Updating inventory in ProviderTree for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2453.156102] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing aggregate associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, aggregates: None {{(pid=61473) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2453.172708] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Refreshing trait associations for resource provider 89e0ead3-8356-4b9c-95ce-a1a119b67576, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=61473) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2453.245359] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83463e3-d203-4613-8504-70ed25bf029d {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.252668] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee189ce2-0c93-4f0e-9d27-1f8f62fa6e1c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.281160] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63b4935-31ec-4b12-9425-1a8c2ce15b5c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.287643] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e470941-a8b4-4fc8-a1ed-83657992794f {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.300224] env[61473]: DEBUG nova.compute.provider_tree [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2453.308874] env[61473]: DEBUG nova.scheduler.client.report [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2453.321958] env[61473]: DEBUG nova.compute.resource_tracker [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61473) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2453.322161] env[61473]: DEBUG oslo_concurrency.lockutils [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.284s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2454.322453] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2454.322791] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Starting heal instance info cache {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2454.322791] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Rebuilding the list of instances to heal {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9959}} [ 2454.341538] 
env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 2f33fc61-3ea2-4818-918a-76cdae031a79] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2454.341695] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: bf13952e-d219-4c77-9a73-ada311eeb053] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2454.341825] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: b35a272f-6743-4e9e-8181-4e704bb3aa06] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2454.341951] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: 8a17da2e-1070-43a1-bc00-22d9b04dd806] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2454.342086] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: f9a8791e-2e0e-40a0-9562-12c8545b900a] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2454.342211] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] [instance: ba226385-7cc1-4a50-bff1-e40c11b51471] Skipping network cache update for instance because it is Building. {{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9968}} [ 2454.342329] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Didn't find any instances for network info cache update. 
{{(pid=61473) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10041}} [ 2454.966300] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2454.966498] env[61473]: DEBUG nova.compute.manager [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Cleaning up deleted instances with incomplete migration {{(pid=61473) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11293}} [ 2462.960456] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Acquiring lock "de2f6dba-5b10-4439-9883-498ed6e03fdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2462.960818] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Lock "de2f6dba-5b10-4439-9883-498ed6e03fdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2462.974241] env[61473]: DEBUG nova.compute.manager [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] Starting instance... 
{{(pid=61473) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2428}} [ 2463.043023] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2463.043023] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2463.043023] env[61473]: INFO nova.compute.claims [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2463.231824] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9aa43e-b018-4340-80df-b25a612bdf72 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.239712] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6125045-d887-4a4f-a2c9-1f282a2a4fe8 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.275455] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3dc7308-a031-4bf2-be98-9f897e48f5b4 {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.282571] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c91fa3-9be2-41e8-a706-8efde4a087de {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.295471] env[61473]: DEBUG nova.compute.provider_tree [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Inventory has not changed in ProviderTree for provider: 89e0ead3-8356-4b9c-95ce-a1a119b67576 {{(pid=61473) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2463.305251] env[61473]: DEBUG nova.scheduler.client.report [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Inventory has not changed for provider 89e0ead3-8356-4b9c-95ce-a1a119b67576 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 329, 'reserved': 0, 'min_unit': 1, 'max_unit': 180, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61473) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2463.320527] env[61473]: DEBUG oslo_concurrency.lockutils [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.280s {{(pid=61473) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.321105] env[61473]: DEBUG nova.compute.manager [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] Start building networks asynchronously for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2825}} [ 2463.356785] env[61473]: DEBUG nova.compute.utils [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Using /dev/sd instead of None {{(pid=61473) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2463.358056] env[61473]: DEBUG nova.compute.manager [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] Allocating IP information in the background. {{(pid=61473) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 2463.358304] env[61473]: DEBUG nova.network.neutron [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] allocate_for_instance() {{(pid=61473) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2463.368209] env[61473]: DEBUG nova.compute.manager [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] Start building block device mappings for instance. {{(pid=61473) _build_resources /opt/stack/nova/nova/compute/manager.py:2860}} [ 2463.417892] env[61473]: DEBUG nova.policy [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4e8e0fcb1414ca4b98aaee825128e2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '674083b59f274d918b9c0c6c3c9c45d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61473) authorize /opt/stack/nova/nova/policy.py:203}} [ 2463.443467] env[61473]: DEBUG nova.compute.manager [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] Start spawning the instance on the hypervisor. 
[ 2463.471096] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-14T02:07:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-14T02:07:43Z,direct_url=,disk_format='vmdk',id=aa35b7fc-44b5-479c-b6c8-60930c581f0d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ceda3158edb84e739c8c130271e8cb2a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-14T02:07:44Z,virtual_size=,visibility=), allow threads: False {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2463.471403] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Flavor limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2463.471616] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Image limits 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2463.471871] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Flavor pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2463.472044] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Image pref 0:0:0 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2463.472290] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61473) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2463.472562] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2463.472744] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
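
The topology walk above, with flavor and image limits of 0:0:0 falling back to 65536 maximums, reduces for a 1-vCPU flavor to a single (sockets, cores, threads) candidate. A simplified illustration of the search (not Nova's full implementation, which also weighs preferences and NUMA constraints):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate sockets*cores*threads factorizations of the vCPU count,
        # capped by the defaults used when flavor and image set no limits.
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= max_sockets
                    and c <= max_cores and t <= max_threads):
                yield (s, c, t)

    print(list(possible_topologies(1)))
    # [(1, 1, 1)] -- the single topology reported in the log
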
[ 2463.472948] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Got 1 possible topologies {{(pid=61473) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2463.473167] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2463.473484] env[61473]: DEBUG nova.virt.hardware [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61473) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2463.474347] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffad3e69-508a-4856-8586-14b1b759b19b {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2463.482807] env[61473]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a36453b-595a-49e0-af29-cca7a1bdcb4c {{(pid=61473) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2463.778440] env[61473]: DEBUG nova.network.neutron [None req-ba2488ff-83c3-4e7b-b3f0-fb583a22d283 tempest-InstanceActionsNegativeTestJSON-311779670 tempest-InstanceActionsNegativeTestJSON-311779670-project-member] [instance: de2f6dba-5b10-4439-9883-498ed6e03fdb] Successfully created port: 476eae40-1553-4504-90f4-9e681b8e1f55 {{(pid=61473) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2463.969585] env[61473]: DEBUG oslo_service.periodic_task [None req-8bde751b-31e3-4832-9881-fc8830e83fde None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61473) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
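
The final record is the compute service's periodic-task loop firing independently of the build in progress. A hedged sketch of the oslo.service mechanism behind it; the class and body here are illustrative, not Nova's ComputeManager:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class DemoManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        # run_immediately=True so the first run_periodic_tasks() call fires it.
        # Nova's real _sync_scheduler_instance_info pushes the host's instance
        # UUIDs to the scheduler; this stand-in only logs that it ran.
        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _sync_scheduler_instance_info(self, context):
            print('periodic task fired')

    mgr = DemoManager()
    mgr.run_periodic_tasks(context=None)  # the service loop calls this on a timer
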